• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 *     * Redistributions of source code must retain the above copyright
7 *       notice, this list of conditions and the following disclaimer.
8 *     * Redistributions in binary form must reproduce the above
9 *       copyright notice, this list of conditions and the following
10 *       disclaimer in the documentation and/or other materials provided
11 *       with the distribution.
12 *     * Neither the name of The Linux Foundation nor the names of its
13 *       contributors may be used to endorse or promote products derived
14 *       from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29 
30 #define LOG_TAG "QCamera3HWI"
31 //#define LOG_NDEBUG 0
32 
33 #define __STDC_LIMIT_MACROS
34 
35 // To remove
36 #include <cutils/properties.h>
37 
38 // System dependencies
39 #include <dlfcn.h>
40 #include <fcntl.h>
41 #include <stdio.h>
42 #include <stdlib.h>
43 #include <time.h>
44 #include <sync/sync.h>
45 #include "gralloc_priv.h"
46 
47 // Display dependencies
48 #include "qdMetaData.h"
49 
50 // Camera dependencies
51 #include "android/QCamera3External.h"
52 #include "util/QCameraFlash.h"
53 #include "QCamera3HWI.h"
54 #include "QCamera3VendorTags.h"
55 #include "QCameraTrace.h"
56 
57 extern "C" {
58 #include "mm_camera_dbg.h"
59 }
60 #include "cam_cond.h"
61 
using namespace android;

namespace qcamera {

// Convenience accessor for a memory object's mapped buffer at a given index.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline depth reported to the framework while no request is in flight.
#define EMPTY_PIPELINE_DELAY 2
// Number of partial metadata results delivered per capture.
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0

// Maximum representable sample value for the given bit depth.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) video dimensions used to detect 4K recording sessions.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which EIS is considered supported.
#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

// Per-configuration stream-count limits advertised to the framework.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
// Number of int32s per metering region: (xmin, ymin, xmax, ymax, weight).
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3
// Element count of a statically sized lookup table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Default post-processing feature mask applied to HAL3 processed streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

// Sentinel meaning "block forever" — presumably passed to timed waits; confirm at call sites.
#define TIMEOUT_NEVER -1

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

// Per-sensor capability tables filled in at HAL init; shared by all sessions.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata handed to the framework, one entry per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
// HAL3 log verbosity; volatile since it may be updated while other threads read it.
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;
118 
// Translates the textual CDS setting ("On"/"Off"/"Auto") into the
// corresponding vendor cam_cds_mode_type value.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
124 
// Android control.effectMode enum <-> vendor effect mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
138 
// Android control.awbMode enum <-> vendor white-balance mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
152 
// Android control.sceneMode enum <-> vendor scene mode translation table.
// Note the naming mismatch: STEADYPHOTO maps to the vendor ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
173 
// Android control.afMode enum <-> vendor focus mode translation table.
// ANDROID_CONTROL_AF_MODE_OFF intentionally appears twice: both the vendor
// OFF and FIXED focus modes surface to the framework as AF_MODE_OFF.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
185 
// Android colorCorrection.aberrationMode <-> vendor CAC mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};
196 
// Android control.aeAntibandingMode <-> vendor antibanding mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
205 
// Derives the flash behavior implied by the Android control.aeMode setting.
// Mapping is many-to-one: flash stays OFF unless an auto/always-flash AE mode
// is selected, and REDEYE falls back to AUTO (no dedicated vendor red-eye
// mode in this table).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
215 
// Android flash.mode enum <-> vendor flash mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
223 
// Android statistics.faceDetectMode <-> vendor face-detect mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
231 
// Android lens.info.focusDistanceCalibration <-> vendor calibration level table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};
242 
// Android lens.state enum <-> vendor AF lens state translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
249 
// JPEG thumbnail sizes advertised to the framework, as a flat list of
// (width, height) pairs; the leading (0, 0) entry is the camera metadata
// convention for "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};
258 
// Android sensor.testPatternMode <-> vendor test pattern translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};
269 
/* Android sensor.referenceIlluminant1 <-> vendor AWB illuminant table.
 * Not every Android enum value has a vendor counterpart, so some are omitted.
 * The order of the entries matters: when mapping from HAL to Android the
 * lookup walks from lower to higher index, so for vendor values that map to
 * several Android values the first match in this list wins.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
295 
// Requested high-frame-rate video fps -> vendor HFR mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
307 
/* camera3 device-ops dispatch table handed to the framework via
 * mCameraDevice.ops. Entries left NULL (register_stream_buffers,
 * get_metadata_vendor_tag_ops) are not serviced by this HAL — presumably
 * deprecated in the reported HAL version; confirm against camera3.h. */
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
319 
// Backend session ids used for dual-camera linking, indexed by camera id.
// Initialised to a 0xDEADBEEF sentinel until openCamera() fetches the real
// id via ops->get_session_id().
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
322 
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes all
 *              member state, fills in the camera3_device handed back to the
 *              framework, and reads the persist.camera.* tuning properties.
 *              No hardware is touched here; that happens in openCamera().
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : module callbacks used to notify the camera service
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      m_perfLock(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pRelCamSyncHeap(NULL),
      m_pRelCamSyncBuf(NULL)
{
    // Resolve log verbosity first so everything below can log correctly.
    getLogLevel();
    m_perfLock.lock_init();
    mCommon.init(gCamCapability[cameraId]);
    // Fill in the hw_device_t / camera3_device header the framework sees.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    // Condition variables paired with mMutex for buffer/request flow control.
    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Request templates are built lazily; start with all slots empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);

    //Load and read GPU library.
    // Default stride padding is used unless the Adreno helper is available
    // (compiled in only when CHECK_GPU_PIXEL_ALIGNMENT is defined).
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
#endif
    // Cache whether the device is in a 60Hz mains zone — presumably feeds the
    // antibanding default; confirm against is60HzZone()'s users.
    m60HzZone = is60HzZone();
}
453 
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Tears the session
 *              down in a strict order: stop every channel, then delete the
 *              channels, send a final unconfigure to the backend, close the
 *              camera, and finally release pending bookkeeping and sync
 *              primitives. Do not reorder these phases.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Phase 2: every channel is stopped, now it is safe to delete them.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // mPictureChannel is only cleared here — presumably owned via mStreamInfo
    // and deleted in the loop above; confirm before changing.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            // An empty CAM_INTF_META_STREAM_INFO tells the backend that no
            // streams remain configured for this session.
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drop any bookkeeping for requests that never completed.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
585 
586 /*===========================================================================
587  * FUNCTION   : erasePendingRequest
588  *
589  * DESCRIPTION: function to erase a desired pending request after freeing any
590  *              allocated memory
591  *
592  * PARAMETERS :
593  *   @i       : iterator pointing to pending request to be erased
594  *
595  * RETURN     : iterator pointing to the next request
596  *==========================================================================*/
597 QCamera3HardwareInterface::pendingRequestIterator
erasePendingRequest(pendingRequestIterator i)598         QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
599 {
600     if (i->input_buffer != NULL) {
601         free(i->input_buffer);
602         i->input_buffer = NULL;
603     }
604     if (i->settings != NULL)
605         free_camera_metadata((camera_metadata_t*)i->settings);
606     return mPendingRequestsList.erase(i);
607 }
608 
609 /*===========================================================================
610  * FUNCTION   : camEvtHandle
611  *
612  * DESCRIPTION: Function registered to mm-camera-interface to handle events
613  *
614  * PARAMETERS :
615  *   @camera_handle : interface layer camera handle
616  *   @evt           : ptr to event
617  *   @user_data     : user data ptr
618  *
619  * RETURN     : none
620  *==========================================================================*/
camEvtHandle(uint32_t,mm_camera_event_t * evt,void * user_data)621 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
622                                           mm_camera_event_t *evt,
623                                           void *user_data)
624 {
625     QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
626     if (obj && evt) {
627         switch(evt->server_event_type) {
628             case CAM_EVENT_TYPE_DAEMON_DIED:
629                 pthread_mutex_lock(&obj->mMutex);
630                 obj->mState = ERROR;
631                 pthread_mutex_unlock(&obj->mMutex);
632                 LOGE("Fatal, camera daemon died");
633                 break;
634 
635             case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
636                 LOGD("HAL got request pull from Daemon");
637                 pthread_mutex_lock(&obj->mMutex);
638                 obj->mWokenUpByDaemon = true;
639                 obj->unblockRequestIfNecessary();
640                 pthread_mutex_unlock(&obj->mMutex);
641                 break;
642 
643             default:
644                 LOGW("Warning: Unhandled event %d",
645                         evt->server_event_type);
646                 break;
647         }
648     } else {
649         LOGE("NULL user_data/evt");
650     }
651 }
652 
653 /*===========================================================================
654  * FUNCTION   : openCamera
655  *
656  * DESCRIPTION: open camera
657  *
658  * PARAMETERS :
659  *   @hw_device  : double ptr for camera device struct
660  *
661  * RETURN     : int32_t type of status
662  *              NO_ERROR  -- success
663  *              none-zero failure code
664  *==========================================================================*/
openCamera(struct hw_device_t ** hw_device)665 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
666 {
667     int rc = 0;
668     if (mState != CLOSED) {
669         *hw_device = NULL;
670         return PERMISSION_DENIED;
671     }
672 
673     m_perfLock.lock_acq();
674     LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
675              mCameraId);
676 
677     rc = openCamera();
678     if (rc == 0) {
679         *hw_device = &mCameraDevice.common;
680     } else
681         *hw_device = NULL;
682 
683     m_perfLock.lock_rel();
684     LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
685              mCameraId, rc);
686 
687     if (rc == NO_ERROR) {
688         mState = OPENED;
689     }
690     return rc;
691 }
692 
693 /*===========================================================================
694  * FUNCTION   : openCamera
695  *
696  * DESCRIPTION: open camera
697  *
698  * PARAMETERS : none
699  *
700  * RETURN     : int32_t type of status
701  *              NO_ERROR  -- success
702  *              none-zero failure code
703  *==========================================================================*/
openCamera()704 int QCamera3HardwareInterface::openCamera()
705 {
706     int rc = 0;
707     char value[PROPERTY_VALUE_MAX];
708 
709     KPI_ATRACE_CALL();
710     if (mCameraHandle) {
711         LOGE("Failure: Camera already opened");
712         return ALREADY_EXISTS;
713     }
714 
715     rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
716     if (rc < 0) {
717         LOGE("Failed to reserve flash for camera id: %d",
718                 mCameraId);
719         return UNKNOWN_ERROR;
720     }
721 
722     rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
723     if (rc) {
724         LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
725         return rc;
726     }
727 
728     if (!mCameraHandle) {
729         LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
730         return -ENODEV;
731     }
732 
733     rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
734             camEvtHandle, (void *)this);
735 
736     if (rc < 0) {
737         LOGE("Error, failed to register event callback");
738         /* Not closing camera here since it is already handled in destructor */
739         return FAILED_TRANSACTION;
740     }
741 
742     mExifParams.debug_params =
743             (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
744     if (mExifParams.debug_params) {
745         memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
746     } else {
747         LOGE("Out of Memory. Allocation failed for 3A debug exif params");
748         return NO_MEMORY;
749     }
750     mFirstConfiguration = true;
751 
752     //Notify display HAL that a camera session is active.
753     //But avoid calling the same during bootup because camera service might open/close
754     //cameras at boot time during its initialization and display service will also internally
755     //wait for camera service to initialize first while calling this display API, resulting in a
756     //deadlock situation. Since boot time camera open/close calls are made only to fetch
757     //capabilities, no need of this display bw optimization.
758     //Use "service.bootanim.exit" property to know boot status.
759     property_get("service.bootanim.exit", value, "0");
760     if (atoi(value) == 1) {
761         pthread_mutex_lock(&gCamLock);
762         if (gNumCameraSessions++ == 0) {
763             setCameraLaunchStatus(true);
764         }
765         pthread_mutex_unlock(&gCamLock);
766     }
767 
768     //fill the session id needed while linking dual cam
769     pthread_mutex_lock(&gCamLock);
770     rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
771         &sessionId[mCameraId]);
772     pthread_mutex_unlock(&gCamLock);
773 
774     if (rc < 0) {
775         LOGE("Error, failed to get sessiion id");
776         return UNKNOWN_ERROR;
777     } else {
778         //Allocate related cam sync buffer
779         //this is needed for the payload that goes along with bundling cmd for related
780         //camera use cases
781         m_pRelCamSyncHeap = new QCamera3HeapMemory(1);
782         rc = m_pRelCamSyncHeap->allocate(sizeof(cam_sync_related_sensors_event_info_t));
783         if(rc != OK) {
784             rc = NO_MEMORY;
785             LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
786             return NO_MEMORY;
787         }
788 
789         //Map memory for related cam sync buffer
790         rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
791                 CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
792                 m_pRelCamSyncHeap->getFd(0),
793                 sizeof(cam_sync_related_sensors_event_info_t),
794                 m_pRelCamSyncHeap->getPtr(0));
795         if(rc < 0) {
796             LOGE("Dualcam: failed to map Related cam sync buffer");
797             rc = FAILED_TRANSACTION;
798             return NO_MEMORY;
799         }
800         m_pRelCamSyncBuf =
801                 (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
802     }
803 
804     LOGH("mCameraId=%d",mCameraId);
805 
806     return NO_ERROR;
807 }
808 
809 /*===========================================================================
810  * FUNCTION   : closeCamera
811  *
812  * DESCRIPTION: close camera
813  *
814  * PARAMETERS : none
815  *
816  * RETURN     : int32_t type of status
817  *              NO_ERROR  -- success
 *              non-zero failure code
819  *==========================================================================*/
closeCamera()820 int QCamera3HardwareInterface::closeCamera()
821 {
822     KPI_ATRACE_CALL();
823     int rc = NO_ERROR;
824     char value[PROPERTY_VALUE_MAX];
825 
826     LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
827              mCameraId);
828     rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
829     mCameraHandle = NULL;
830 
831     //reset session id to some invalid id
832     pthread_mutex_lock(&gCamLock);
833     sessionId[mCameraId] = 0xDEADBEEF;
834     pthread_mutex_unlock(&gCamLock);
835 
836     //Notify display HAL that there is no active camera session
837     //but avoid calling the same during bootup. Refer to openCamera
838     //for more details.
839     property_get("service.bootanim.exit", value, "0");
840     if (atoi(value) == 1) {
841         pthread_mutex_lock(&gCamLock);
842         if (--gNumCameraSessions == 0) {
843             setCameraLaunchStatus(false);
844         }
845         pthread_mutex_unlock(&gCamLock);
846     }
847 
848     if (NULL != m_pRelCamSyncHeap) {
849         m_pRelCamSyncHeap->deallocate();
850         delete m_pRelCamSyncHeap;
851         m_pRelCamSyncHeap = NULL;
852         m_pRelCamSyncBuf = NULL;
853     }
854 
855     if (mExifParams.debug_params) {
856         free(mExifParams.debug_params);
857         mExifParams.debug_params = NULL;
858     }
859     if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
860         LOGW("Failed to release flash for camera id: %d",
861                 mCameraId);
862     }
863     mState = CLOSED;
864     LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
865          mCameraId, rc);
866     return rc;
867 }
868 
869 /*===========================================================================
870  * FUNCTION   : initialize
871  *
872  * DESCRIPTION: Initialize frameworks callback functions
873  *
874  * PARAMETERS :
875  *   @callback_ops : callback function to frameworks
876  *
877  * RETURN     :
878  *
879  *==========================================================================*/
initialize(const struct camera3_callback_ops * callback_ops)880 int QCamera3HardwareInterface::initialize(
881         const struct camera3_callback_ops *callback_ops)
882 {
883     ATRACE_CALL();
884     int rc;
885 
886     LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
887     pthread_mutex_lock(&mMutex);
888 
889     // Validate current state
890     switch (mState) {
891         case OPENED:
892             /* valid state */
893             break;
894         default:
895             LOGE("Invalid state %d", mState);
896             rc = -ENODEV;
897             goto err1;
898     }
899 
900     rc = initParameters();
901     if (rc < 0) {
902         LOGE("initParamters failed %d", rc);
903         goto err1;
904     }
905     mCallbackOps = callback_ops;
906 
907     mChannelHandle = mCameraHandle->ops->add_channel(
908             mCameraHandle->camera_handle, NULL, NULL, this);
909     if (mChannelHandle == 0) {
910         LOGE("add_channel failed");
911         rc = -ENOMEM;
912         pthread_mutex_unlock(&mMutex);
913         return rc;
914     }
915 
916     pthread_mutex_unlock(&mMutex);
917     mCameraInitialized = true;
918     mState = INITIALIZED;
919     LOGI("X");
920     return 0;
921 
922 err1:
923     pthread_mutex_unlock(&mMutex);
924     return rc;
925 }
926 
927 /*===========================================================================
928  * FUNCTION   : validateStreamDimensions
929  *
930  * DESCRIPTION: Check if the configuration requested are those advertised
931  *
932  * PARAMETERS :
933  *   @stream_list : streams to be configured
934  *
935  * RETURN     :
936  *
937  *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find the input stream, if it exists.
    * At most one input stream is allowed per configuration.
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270 degree rotation the buffer is produced with width and
        // height swapped, so validate the swapped dimensions instead.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW streams must match one of the advertised raw dimensions.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                // Full-active-array ZSL/bidirectional/input streams are
                // accepted directly; the 'break' below exits the switch.
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            // All other processed streams (and lenient ZSL sizes) fall back
            // to the generated picture sizes table.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1049 
1050 /*===========================================================================
1051  * FUNCTION   : validateUsageFlags
1052  *
1053  * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1054  *
1055  * PARAMETERS :
1056  *   @stream_list : streams to be configured
1057  *
1058  * RETURN     :
1059  *   NO_ERROR if the usage flags are supported
1060  *   error code if usage flags are not supported
1061  *
1062  *==========================================================================*/
validateUsageFlags(const camera3_stream_configuration_t * streamList)1063 int QCamera3HardwareInterface::validateUsageFlags(
1064         const camera3_stream_configuration_t* streamList)
1065 {
1066     for (size_t j = 0; j < streamList->num_streams; j++) {
1067         const camera3_stream_t *newStream = streamList->streams[j];
1068 
1069         if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1070             (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1071              newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1072             continue;
1073         }
1074 
1075         bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1076         bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1077         bool isZSL = IS_USAGE_ZSL(newStream->usage);
1078         cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1079                 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height);
1080         cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1081                 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height);
1082         cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1083                 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height);
1084 
1085         // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1086         // So color spaces will always match.
1087 
1088         // Check whether underlying formats of shared streams match.
1089         if (isVideo && isPreview && videoFormat != previewFormat) {
1090             LOGE("Combined video and preview usage flag is not supported");
1091             return -EINVAL;
1092         }
1093         if (isPreview && isZSL && previewFormat != zslFormat) {
1094             LOGE("Combined preview and zsl usage flag is not supported");
1095             return -EINVAL;
1096         }
1097         if (isVideo && isZSL && videoFormat != zslFormat) {
1098             LOGE("Combined video and zsl usage flag is not supported");
1099             return -EINVAL;
1100         }
1101     }
1102     return NO_ERROR;
1103 }
1104 
1105 /*===========================================================================
1106  * FUNCTION   : validateUsageFlagsForEis
1107  *
1108  * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1109  *
1110  * PARAMETERS :
1111  *   @stream_list : streams to be configured
1112  *
1113  * RETURN     :
1114  *   NO_ERROR if the usage flags are supported
1115  *   error code if usage flags are not supported
1116  *
1117  *==========================================================================*/
validateUsageFlagsForEis(const camera3_stream_configuration_t * streamList)1118 int QCamera3HardwareInterface::validateUsageFlagsForEis(
1119         const camera3_stream_configuration_t* streamList)
1120 {
1121     for (size_t j = 0; j < streamList->num_streams; j++) {
1122         const camera3_stream_t *newStream = streamList->streams[j];
1123 
1124         bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1125         bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1126 
1127         // Because EIS is "hard-coded" for certain use case, and current
1128         // implementation doesn't support shared preview and video on the same
1129         // stream, return failure if EIS is forced on.
1130         if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1131             LOGE("Combined video and preview usage flag is not supported due to EIS");
1132             return -EINVAL;
1133         }
1134     }
1135     return NO_ERROR;
1136 }
1137 
1138 
1139 /*==============================================================================
1140  * FUNCTION   : isSupportChannelNeeded
1141  *
1142  * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1143  *
1144  * PARAMETERS :
1145  *   @stream_list : streams to be configured
1146  *   @stream_config_info : the config info for streams to be configured
1147  *
 * RETURN     : Boolean true/false decision
1149  *
1150  *==========================================================================*/
isSupportChannelNeeded(camera3_stream_configuration_t * streamList,cam_stream_size_info_t stream_config_info)1151 bool QCamera3HardwareInterface::isSupportChannelNeeded(
1152         camera3_stream_configuration_t *streamList,
1153         cam_stream_size_info_t stream_config_info)
1154 {
1155     uint32_t i;
1156     bool pprocRequested = false;
1157     /* Check for conditions where PProc pipeline does not have any streams*/
1158     for (i = 0; i < stream_config_info.num_streams; i++) {
1159         if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1160                 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1161             pprocRequested = true;
1162             break;
1163         }
1164     }
1165 
1166     if (pprocRequested == false )
1167         return true;
1168 
1169     /* Dummy stream needed if only raw or jpeg streams present */
1170     for (i = 0; i < streamList->num_streams; i++) {
1171         switch(streamList->streams[i]->format) {
1172             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1173             case HAL_PIXEL_FORMAT_RAW10:
1174             case HAL_PIXEL_FORMAT_RAW16:
1175             case HAL_PIXEL_FORMAT_BLOB:
1176                 break;
1177             default:
1178                 return false;
1179         }
1180     }
1181     return true;
1182 }
1183 
1184 /*==============================================================================
1185  * FUNCTION   : getSensorOutputSize
1186  *
 * DESCRIPTION: Get sensor output size based on current stream configuration
1188  *
1189  * PARAMETERS :
1190  *   @sensor_dim : sensor output dimension (output)
1191  *
1192  * RETURN     : int32_t type of status
1193  *              NO_ERROR  -- success
 *              none-zero failure code
 *              (i.e. non-zero failure code)
1195  *
1196  *==========================================================================*/
getSensorOutputSize(cam_dimension_t & sensor_dim)1197 int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
1198 {
1199     int32_t rc = NO_ERROR;
1200 
1201     cam_dimension_t max_dim = {0, 0};
1202     for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1203         if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1204             max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1205         if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1206             max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1207     }
1208 
1209     clear_metadata_buffer(mParameters);
1210 
1211     rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1212             max_dim);
1213     if (rc != NO_ERROR) {
1214         LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1215         return rc;
1216     }
1217 
1218     rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1219     if (rc != NO_ERROR) {
1220         LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1221         return rc;
1222     }
1223 
1224     clear_metadata_buffer(mParameters);
1225     ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1226 
1227     rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1228             mParameters);
1229     if (rc != NO_ERROR) {
1230         LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
1231         return rc;
1232     }
1233 
1234     READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1235     LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
1236 
1237     return rc;
1238 }
1239 
1240 /*==============================================================================
1241  * FUNCTION   : enablePowerHint
1242  *
1243  * DESCRIPTION: enable single powerhint for preview and different video modes.
1244  *
1245  * PARAMETERS :
1246  *
1247  * RETURN     : NULL
1248  *
1249  *==========================================================================*/
enablePowerHint()1250 void QCamera3HardwareInterface::enablePowerHint()
1251 {
1252     if (!mPowerHintEnabled) {
1253         m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
1254         mPowerHintEnabled = true;
1255     }
1256 }
1257 
1258 /*==============================================================================
1259  * FUNCTION   : disablePowerHint
1260  *
1261  * DESCRIPTION: disable current powerhint.
1262  *
1263  * PARAMETERS :
1264  *
1265  * RETURN     : NULL
1266  *
1267  *==========================================================================*/
disablePowerHint()1268 void QCamera3HardwareInterface::disablePowerHint()
1269 {
1270     if (mPowerHintEnabled) {
1271         m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
1272         mPowerHintEnabled = false;
1273     }
1274 }
1275 
1276 /*==============================================================================
1277  * FUNCTION   : addToPPFeatureMask
1278  *
1279  * DESCRIPTION: add additional features to pp feature mask based on
1280  *              stream type and usecase
1281  *
1282  * PARAMETERS :
1283  *   @stream_format : stream type for feature mask
1284  *   @stream_idx : stream idx within postprocess_mask list to change
1285  *
1286  * RETURN     : NULL
1287  *
1288  *==========================================================================*/
addToPPFeatureMask(int stream_format,uint32_t stream_idx)1289 void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1290         uint32_t stream_idx)
1291 {
1292     char feature_mask_value[PROPERTY_VALUE_MAX];
1293     cam_feature_mask_t feature_mask;
1294     int args_converted;
1295     int property_len;
1296 
1297     /* Get feature mask from property */
1298     property_len = property_get("persist.camera.hal3.feature",
1299             feature_mask_value, "0");
1300     if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1301             (feature_mask_value[1] == 'x')) {
1302         args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1303     } else {
1304         args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1305     }
1306     if (1 != args_converted) {
1307         feature_mask = 0;
1308         LOGE("Wrong feature mask %s", feature_mask_value);
1309         return;
1310     }
1311 
1312     switch (stream_format) {
1313     case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1314         /* Add LLVD to pp feature mask only if video hint is enabled */
1315         if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1316             mStreamConfigInfo.postprocess_mask[stream_idx]
1317                     |= CAM_QTI_FEATURE_SW_TNR;
1318             LOGH("Added SW TNR to pp feature mask");
1319         } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1320             mStreamConfigInfo.postprocess_mask[stream_idx]
1321                     |= CAM_QCOM_FEATURE_LLVD;
1322             LOGH("Added LLVD SeeMore to pp feature mask");
1323         }
1324         break;
1325     }
1326     default:
1327         break;
1328     }
1329     LOGD("PP feature mask %llx",
1330             mStreamConfigInfo.postprocess_mask[stream_idx]);
1331 }
1332 
1333 /*==============================================================================
1334  * FUNCTION   : updateFpsInPreviewBuffer
1335  *
1336  * DESCRIPTION: update FPS information in preview buffer.
1337  *
1338  * PARAMETERS :
1339  *   @metadata    : pointer to metadata buffer
1340  *   @frame_number: frame_number to look for in pending buffer list
1341  *
1342  * RETURN     : None
1343  *
1344  *==========================================================================*/
updateFpsInPreviewBuffer(metadata_buffer_t * metadata,uint32_t frame_number)1345 void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1346         uint32_t frame_number)
1347 {
1348     // Mark all pending buffers for this particular request
1349     // with corresponding framerate information
1350     for (List<PendingBuffersInRequest>::iterator req =
1351             mPendingBuffersMap.mPendingBuffersInRequest.begin();
1352             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1353         for(List<PendingBufferInfo>::iterator j =
1354                 req->mPendingBufferList.begin();
1355                 j != req->mPendingBufferList.end(); j++) {
1356             QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1357             if ((req->frame_number == frame_number) &&
1358                 (channel->getStreamTypeMask() &
1359                 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1360                 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1361                     CAM_INTF_PARM_FPS_RANGE, metadata) {
1362                     int32_t cameraFps = float_range->max_fps;
1363                     struct private_handle_t *priv_handle =
1364                         (struct private_handle_t *)(*(j->buffer));
1365                     setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1366                 }
1367             }
1368         }
1369     }
1370 }
1371 
1372 /*==============================================================================
1373  * FUNCTION   : updateTimeStampInPendingBuffers
1374  *
1375  * DESCRIPTION: update timestamp in display metadata for all pending buffers
1376  *              of a frame number
1377  *
1378  * PARAMETERS :
1379  *   @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1380  *   @timestamp   : timestamp to be set
1381  *
1382  * RETURN     : None
1383  *
1384  *==========================================================================*/
updateTimeStampInPendingBuffers(uint32_t frameNumber,nsecs_t timestamp)1385 void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1386         uint32_t frameNumber, nsecs_t timestamp)
1387 {
1388     for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1389             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1390         if (req->frame_number != frameNumber)
1391             continue;
1392 
1393         for (auto k = req->mPendingBufferList.begin();
1394                 k != req->mPendingBufferList.end(); k++ ) {
1395             struct private_handle_t *priv_handle =
1396                     (struct private_handle_t *) (*(k->buffer));
1397             setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1398         }
1399     }
1400     return;
1401 }
1402 
1403 /*===========================================================================
1404  * FUNCTION   : configureStreams
1405  *
1406  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1407  *              and output streams.
1408  *
1409  * PARAMETERS :
1410  *   @stream_list : streams to be configured
1411  *
1412  * RETURN     :
1413  *
1414  *==========================================================================*/
configureStreams(camera3_stream_configuration_t * streamList)1415 int QCamera3HardwareInterface::configureStreams(
1416         camera3_stream_configuration_t *streamList)
1417 {
1418     ATRACE_CALL();
1419     int rc = 0;
1420 
1421     // Acquire perfLock before configure streams
1422     m_perfLock.lock_acq();
1423     rc = configureStreamsPerfLocked(streamList);
1424     m_perfLock.lock_rel();
1425 
1426     return rc;
1427 }
1428 
1429 /*===========================================================================
1430  * FUNCTION   : configureStreamsPerfLocked
1431  *
1432  * DESCRIPTION: configureStreams while perfLock is held.
1433  *
1434  * PARAMETERS :
1435  *   @stream_list : streams to be configured
1436  *
1437  * RETURN     : int32_t type of status
1438  *              NO_ERROR  -- success
 *              non-zero failure code
1440  *==========================================================================*/
configureStreamsPerfLocked(camera3_stream_configuration_t * streamList)1441 int QCamera3HardwareInterface::configureStreamsPerfLocked(
1442         camera3_stream_configuration_t *streamList)
1443 {
1444     ATRACE_CALL();
1445     int rc = 0;
1446 
1447     // Sanity check stream_list
1448     if (streamList == NULL) {
1449         LOGE("NULL stream configuration");
1450         return BAD_VALUE;
1451     }
1452     if (streamList->streams == NULL) {
1453         LOGE("NULL stream list");
1454         return BAD_VALUE;
1455     }
1456 
1457     if (streamList->num_streams < 1) {
1458         LOGE("Bad number of streams requested: %d",
1459                 streamList->num_streams);
1460         return BAD_VALUE;
1461     }
1462 
1463     if (streamList->num_streams >= MAX_NUM_STREAMS) {
1464         LOGE("Maximum number of streams %d exceeded: %d",
1465                 MAX_NUM_STREAMS, streamList->num_streams);
1466         return BAD_VALUE;
1467     }
1468 
1469     rc = validateUsageFlags(streamList);
1470     if (rc != NO_ERROR) {
1471         return rc;
1472     }
1473 
1474     mOpMode = streamList->operation_mode;
1475     LOGD("mOpMode: %d", mOpMode);
1476 
1477     /* first invalidate all the steams in the mStreamList
1478      * if they appear again, they will be validated */
1479     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1480             it != mStreamInfo.end(); it++) {
1481         QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1482         if (channel) {
1483           channel->stop();
1484         }
1485         (*it)->status = INVALID;
1486     }
1487 
1488     if (mRawDumpChannel) {
1489         mRawDumpChannel->stop();
1490         delete mRawDumpChannel;
1491         mRawDumpChannel = NULL;
1492     }
1493 
1494     if (mSupportChannel)
1495         mSupportChannel->stop();
1496 
1497     if (mAnalysisChannel) {
1498         mAnalysisChannel->stop();
1499     }
1500     if (mMetadataChannel) {
1501         /* If content of mStreamInfo is not 0, there is metadata stream */
1502         mMetadataChannel->stop();
1503     }
1504     if (mChannelHandle) {
1505         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1506                 mChannelHandle);
1507         LOGD("stopping channel %d", mChannelHandle);
1508     }
1509 
1510     pthread_mutex_lock(&mMutex);
1511 
1512     // Check state
1513     switch (mState) {
1514         case INITIALIZED:
1515         case CONFIGURED:
1516         case STARTED:
1517             /* valid state */
1518             break;
1519         default:
1520             LOGE("Invalid state %d", mState);
1521             pthread_mutex_unlock(&mMutex);
1522             return -ENODEV;
1523     }
1524 
1525     /* Check whether we have video stream */
1526     m_bIs4KVideo = false;
1527     m_bIsVideo = false;
1528     m_bEisSupportedSize = false;
1529     m_bTnrEnabled = false;
1530     bool isZsl = false;
1531     bool isPreview = false;
1532     uint32_t videoWidth = 0U;
1533     uint32_t videoHeight = 0U;
1534     size_t rawStreamCnt = 0;
1535     size_t stallStreamCnt = 0;
1536     size_t processedStreamCnt = 0;
1537     // Number of streams on ISP encoder path
1538     size_t numStreamsOnEncoder = 0;
1539     size_t numYuv888OnEncoder = 0;
1540     bool bYuv888OverrideJpeg = false;
1541     cam_dimension_t largeYuv888Size = {0, 0};
1542     cam_dimension_t maxViewfinderSize = {0, 0};
1543     bool bJpegExceeds4K = false;
1544     bool bJpegOnEncoder = false;
1545     bool bUseCommonFeatureMask = false;
1546     cam_feature_mask_t commonFeatureMask = 0;
1547     bool bSmallJpegSize = false;
1548     uint32_t width_ratio;
1549     uint32_t height_ratio;
1550     maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1551     camera3_stream_t *inputStream = NULL;
1552     bool isJpeg = false;
1553     cam_dimension_t jpegSize = {0, 0};
1554 
1555     cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1556 
1557     /*EIS configuration*/
1558     bool eisSupported = false;
1559     bool oisSupported = false;
1560     int32_t margin_index = -1;
1561     uint8_t eis_prop_set;
1562     uint32_t maxEisWidth = 0;
1563     uint32_t maxEisHeight = 0;
1564 
1565     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1566 
1567     size_t count = IS_TYPE_MAX;
1568     count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1569     for (size_t i = 0; i < count; i++) {
1570         if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1571             eisSupported = true;
1572             margin_index = (int32_t)i;
1573             break;
1574         }
1575     }
1576 
1577     count = CAM_OPT_STAB_MAX;
1578     count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1579     for (size_t i = 0; i < count; i++) {
1580         if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1581             oisSupported = true;
1582             break;
1583         }
1584     }
1585 
1586     if (eisSupported) {
1587         maxEisWidth = MAX_EIS_WIDTH;
1588         maxEisHeight = MAX_EIS_HEIGHT;
1589     }
1590 
1591     /* EIS setprop control */
1592     char eis_prop[PROPERTY_VALUE_MAX];
1593     memset(eis_prop, 0, sizeof(eis_prop));
1594     property_get("persist.camera.eis.enable", eis_prop, "0");
1595     eis_prop_set = (uint8_t)atoi(eis_prop);
1596 
1597     m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1598             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1599 
1600     /* stream configurations */
1601     for (size_t i = 0; i < streamList->num_streams; i++) {
1602         camera3_stream_t *newStream = streamList->streams[i];
1603         LOGI("stream[%d] type = %d, format = %d, width = %d, "
1604                 "height = %d, rotation = %d, usage = 0x%x",
1605                  i, newStream->stream_type, newStream->format,
1606                 newStream->width, newStream->height, newStream->rotation,
1607                 newStream->usage);
1608         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1609                 newStream->stream_type == CAMERA3_STREAM_INPUT){
1610             isZsl = true;
1611         }
1612         if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1613                 IS_USAGE_PREVIEW(newStream->usage)) {
1614             isPreview = true;
1615         }
1616 
1617         if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1618             inputStream = newStream;
1619         }
1620 
1621         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1622             isJpeg = true;
1623             jpegSize.width = newStream->width;
1624             jpegSize.height = newStream->height;
1625             if (newStream->width > VIDEO_4K_WIDTH ||
1626                     newStream->height > VIDEO_4K_HEIGHT)
1627                 bJpegExceeds4K = true;
1628         }
1629 
1630         if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1631                 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1632             m_bIsVideo = true;
1633             videoWidth = newStream->width;
1634             videoHeight = newStream->height;
1635             if ((VIDEO_4K_WIDTH <= newStream->width) &&
1636                     (VIDEO_4K_HEIGHT <= newStream->height)) {
1637                 m_bIs4KVideo = true;
1638             }
1639             m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1640                                   (newStream->height <= maxEisHeight);
1641 
1642         }
1643         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1644                 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1645             switch (newStream->format) {
1646             case HAL_PIXEL_FORMAT_BLOB:
1647                 stallStreamCnt++;
1648                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1649                         newStream->height)) {
1650                     numStreamsOnEncoder++;
1651                     bJpegOnEncoder = true;
1652                 }
1653                 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1654                         newStream->width);
1655                 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1656                         newStream->height);;
1657                 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1658                         "FATAL: max_downscale_factor cannot be zero and so assert");
1659                 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1660                     (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1661                     LOGH("Setting small jpeg size flag to true");
1662                     bSmallJpegSize = true;
1663                 }
1664                 break;
1665             case HAL_PIXEL_FORMAT_RAW10:
1666             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1667             case HAL_PIXEL_FORMAT_RAW16:
1668                 rawStreamCnt++;
1669                 break;
1670             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1671                 processedStreamCnt++;
1672                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1673                         newStream->height)) {
1674                     if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1675                             !IS_USAGE_ZSL(newStream->usage)) {
1676                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1677                     }
1678                     numStreamsOnEncoder++;
1679                 }
1680                 break;
1681             case HAL_PIXEL_FORMAT_YCbCr_420_888:
1682                 processedStreamCnt++;
1683                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1684                         newStream->height)) {
1685                     // If Yuv888 size is not greater than 4K, set feature mask
1686                     // to SUPERSET so that it support concurrent request on
1687                     // YUV and JPEG.
1688                     if (newStream->width <= VIDEO_4K_WIDTH &&
1689                             newStream->height <= VIDEO_4K_HEIGHT) {
1690                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1691                     }
1692                     numStreamsOnEncoder++;
1693                     numYuv888OnEncoder++;
1694                     largeYuv888Size.width = newStream->width;
1695                     largeYuv888Size.height = newStream->height;
1696                 }
1697                 break;
1698             default:
1699                 processedStreamCnt++;
1700                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1701                         newStream->height)) {
1702                     commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1703                     numStreamsOnEncoder++;
1704                 }
1705                 break;
1706             }
1707 
1708         }
1709     }
1710 
1711     if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1712         !m_bIsVideo) {
1713         m_bEisEnable = false;
1714     }
1715 
1716     if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1717         pthread_mutex_unlock(&mMutex);
1718         return -EINVAL;
1719     }
1720     /* Logic to enable/disable TNR based on specific config size/etc.*/
1721     if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1722             ((videoWidth == 1920 && videoHeight == 1080) ||
1723             (videoWidth == 1280 && videoHeight == 720)) &&
1724             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1725         m_bTnrEnabled = true;
1726 
1727     /* Check if num_streams is sane */
1728     if (stallStreamCnt > MAX_STALLING_STREAMS ||
1729             rawStreamCnt > MAX_RAW_STREAMS ||
1730             processedStreamCnt > MAX_PROCESSED_STREAMS) {
1731         LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1732                  stallStreamCnt, rawStreamCnt, processedStreamCnt);
1733         pthread_mutex_unlock(&mMutex);
1734         return -EINVAL;
1735     }
1736     /* Check whether we have zsl stream or 4k video case */
1737     if (isZsl && m_bIsVideo) {
1738         LOGE("Currently invalid configuration ZSL&Video!");
1739         pthread_mutex_unlock(&mMutex);
1740         return -EINVAL;
1741     }
1742     /* Check if stream sizes are sane */
1743     if (numStreamsOnEncoder > 2) {
1744         LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1745         pthread_mutex_unlock(&mMutex);
1746         return -EINVAL;
1747     } else if (1 < numStreamsOnEncoder){
1748         bUseCommonFeatureMask = true;
1749         LOGH("Multiple streams above max viewfinder size, common mask needed");
1750     }
1751 
1752     /* Check if BLOB size is greater than 4k in 4k recording case */
1753     if (m_bIs4KVideo && bJpegExceeds4K) {
1754         LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1755         pthread_mutex_unlock(&mMutex);
1756         return -EINVAL;
1757     }
1758 
1759     // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1760     // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1761     // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1762     // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1763     // configurations:
1764     //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1765     //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1766     //    (These two configurations will not have CAC2 enabled even in HQ modes.)
1767     if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1768         ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1769                 __func__);
1770         pthread_mutex_unlock(&mMutex);
1771         return -EINVAL;
1772     }
1773 
1774     // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1775     // the YUV stream's size is greater or equal to the JPEG size, set common
1776     // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1777     if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1778             jpegSize.width, jpegSize.height) &&
1779             largeYuv888Size.width > jpegSize.width &&
1780             largeYuv888Size.height > jpegSize.height) {
1781         bYuv888OverrideJpeg = true;
1782     } else if (!isJpeg && numStreamsOnEncoder > 1) {
1783         commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1784     }
1785 
1786     LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1787             maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1788             commonFeatureMask);
1789     LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1790             numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1791 
1792     rc = validateStreamDimensions(streamList);
1793     if (rc == NO_ERROR) {
1794         rc = validateStreamRotations(streamList);
1795     }
1796     if (rc != NO_ERROR) {
1797         LOGE("Invalid stream configuration requested!");
1798         pthread_mutex_unlock(&mMutex);
1799         return rc;
1800     }
1801 
1802     camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1803     camera3_stream_t *jpegStream = NULL;
1804     for (size_t i = 0; i < streamList->num_streams; i++) {
1805         camera3_stream_t *newStream = streamList->streams[i];
1806         LOGH("newStream type = %d, stream format = %d "
1807                 "stream size : %d x %d, stream rotation = %d",
1808                  newStream->stream_type, newStream->format,
1809                 newStream->width, newStream->height, newStream->rotation);
1810         //if the stream is in the mStreamList validate it
1811         bool stream_exists = false;
1812         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1813                 it != mStreamInfo.end(); it++) {
1814             if ((*it)->stream == newStream) {
1815                 QCamera3ProcessingChannel *channel =
1816                     (QCamera3ProcessingChannel*)(*it)->stream->priv;
1817                 stream_exists = true;
1818                 if (channel)
1819                     delete channel;
1820                 (*it)->status = VALID;
1821                 (*it)->stream->priv = NULL;
1822                 (*it)->channel = NULL;
1823             }
1824         }
1825         if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1826             //new stream
1827             stream_info_t* stream_info;
1828             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1829             if (!stream_info) {
1830                LOGE("Could not allocate stream info");
1831                rc = -ENOMEM;
1832                pthread_mutex_unlock(&mMutex);
1833                return rc;
1834             }
1835             stream_info->stream = newStream;
1836             stream_info->status = VALID;
1837             stream_info->channel = NULL;
1838             mStreamInfo.push_back(stream_info);
1839         }
1840         /* Covers Opaque ZSL and API1 F/W ZSL */
1841         if (IS_USAGE_ZSL(newStream->usage)
1842                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1843             if (zslStream != NULL) {
1844                 LOGE("Multiple input/reprocess streams requested!");
1845                 pthread_mutex_unlock(&mMutex);
1846                 return BAD_VALUE;
1847             }
1848             zslStream = newStream;
1849         }
1850         /* Covers YUV reprocess */
1851         if (inputStream != NULL) {
1852             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1853                     && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1854                     && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1855                     && inputStream->width == newStream->width
1856                     && inputStream->height == newStream->height) {
1857                 if (zslStream != NULL) {
1858                     /* This scenario indicates multiple YUV streams with same size
1859                      * as input stream have been requested, since zsl stream handle
1860                      * is solely use for the purpose of overriding the size of streams
1861                      * which share h/w streams we will just make a guess here as to
1862                      * which of the stream is a ZSL stream, this will be refactored
1863                      * once we make generic logic for streams sharing encoder output
1864                      */
1865                     LOGH("Warning, Multiple ip/reprocess streams requested!");
1866                 }
1867                 zslStream = newStream;
1868             }
1869         }
1870         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1871             jpegStream = newStream;
1872         }
1873     }
1874 
1875     /* If a zsl stream is set, we know that we have configured at least one input or
1876        bidirectional stream */
1877     if (NULL != zslStream) {
1878         mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1879         mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1880         mInputStreamInfo.format = zslStream->format;
1881         mInputStreamInfo.usage = zslStream->usage;
1882         LOGD("Input stream configured! %d x %d, format %d, usage %d",
1883                  mInputStreamInfo.dim.width,
1884                 mInputStreamInfo.dim.height,
1885                 mInputStreamInfo.format, mInputStreamInfo.usage);
1886     }
1887 
1888     cleanAndSortStreamInfo();
1889     if (mMetadataChannel) {
1890         delete mMetadataChannel;
1891         mMetadataChannel = NULL;
1892     }
1893     if (mSupportChannel) {
1894         delete mSupportChannel;
1895         mSupportChannel = NULL;
1896     }
1897 
1898     if (mAnalysisChannel) {
1899         delete mAnalysisChannel;
1900         mAnalysisChannel = NULL;
1901     }
1902 
1903     if (mDummyBatchChannel) {
1904         delete mDummyBatchChannel;
1905         mDummyBatchChannel = NULL;
1906     }
1907 
1908     //Create metadata channel and initialize it
1909     cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1910     setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1911             gCamCapability[mCameraId]->color_arrangement);
1912     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1913                     mChannelHandle, mCameraHandle->ops, captureResultCb,
1914                     setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
1915     if (mMetadataChannel == NULL) {
1916         LOGE("failed to allocate metadata channel");
1917         rc = -ENOMEM;
1918         pthread_mutex_unlock(&mMutex);
1919         return rc;
1920     }
1921     rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1922     if (rc < 0) {
1923         LOGE("metadata channel initialization failed");
1924         delete mMetadataChannel;
1925         mMetadataChannel = NULL;
1926         pthread_mutex_unlock(&mMutex);
1927         return rc;
1928     }
1929 
1930     // Create analysis stream all the time, even when h/w support is not available
1931     {
1932         cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1933         cam_analysis_info_t analysisInfo;
1934         rc = mCommon.getAnalysisInfo(
1935                 FALSE,
1936                 TRUE,
1937                 analysisFeatureMask,
1938                 &analysisInfo);
1939         if (rc != NO_ERROR) {
1940             LOGE("getAnalysisInfo failed, ret = %d", rc);
1941             pthread_mutex_unlock(&mMutex);
1942             return rc;
1943         }
1944 
1945         cam_color_filter_arrangement_t analysis_color_arrangement =
1946                 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
1947                 CAM_FILTER_ARRANGEMENT_Y :
1948                 gCamCapability[mCameraId]->color_arrangement);
1949         setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
1950                 analysis_color_arrangement);
1951 
1952         mAnalysisChannel = new QCamera3SupportChannel(
1953                 mCameraHandle->camera_handle,
1954                 mChannelHandle,
1955                 mCameraHandle->ops,
1956                 &analysisInfo.analysis_padding_info,
1957                 analysisFeatureMask,
1958                 CAM_STREAM_TYPE_ANALYSIS,
1959                 &analysisInfo.analysis_max_res,
1960                 (analysisInfo.analysis_format
1961                 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
1962                 : CAM_FORMAT_YUV_420_NV21),
1963                 analysisInfo.hw_analysis_supported,
1964                 this,
1965                 0); // force buffer count to 0
1966         if (!mAnalysisChannel) {
1967             LOGE("H/W Analysis channel cannot be created");
1968             pthread_mutex_unlock(&mMutex);
1969             return -ENOMEM;
1970         }
1971     }
1972 
1973     bool isRawStreamRequested = false;
1974     memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1975     /* Allocate channel objects for the requested streams */
1976     for (size_t i = 0; i < streamList->num_streams; i++) {
1977         camera3_stream_t *newStream = streamList->streams[i];
1978         uint32_t stream_usage = newStream->usage;
1979         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1980         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1981         if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1982                 || IS_USAGE_ZSL(newStream->usage)) &&
1983             newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1984             mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1985             if (bUseCommonFeatureMask) {
1986                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1987                         commonFeatureMask;
1988             } else {
1989                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1990                         CAM_QCOM_FEATURE_NONE;
1991             }
1992 
1993         } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1994                 LOGH("Input stream configured, reprocess config");
1995         } else {
1996             //for non zsl streams find out the format
1997             switch (newStream->format) {
1998             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1999             {
2000                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2001                         CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2002                 /* add additional features to pp feature mask */
2003                 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2004                         mStreamConfigInfo.num_streams);
2005 
2006                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2007                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2008                                 CAM_STREAM_TYPE_VIDEO;
2009                     if (m_bTnrEnabled && m_bTnrVideo) {
2010                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2011                             CAM_QCOM_FEATURE_CPP_TNR;
2012                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2013                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2014                                 ~CAM_QCOM_FEATURE_CDS;
2015                     }
2016                 } else {
2017                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2018                             CAM_STREAM_TYPE_PREVIEW;
2019                     if (m_bTnrEnabled && m_bTnrPreview) {
2020                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2021                                 CAM_QCOM_FEATURE_CPP_TNR;
2022                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2023                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2024                                 ~CAM_QCOM_FEATURE_CDS;
2025                     }
2026                     padding_info.width_padding = mSurfaceStridePadding;
2027                     padding_info.height_padding = CAM_PAD_TO_2;
2028                 }
2029                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2030                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2031                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2032                             newStream->height;
2033                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2034                             newStream->width;
2035                 }
2036             }
2037             break;
2038             case HAL_PIXEL_FORMAT_YCbCr_420_888:
2039                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2040                 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2041                     if (bUseCommonFeatureMask)
2042                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2043                                 commonFeatureMask;
2044                     else
2045                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2046                                 CAM_QCOM_FEATURE_NONE;
2047                 } else {
2048                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2049                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2050                 }
2051             break;
2052             case HAL_PIXEL_FORMAT_BLOB:
2053                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2054                 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2055                 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2056                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2057                              CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2058                 } else {
2059                     if (bUseCommonFeatureMask &&
2060                             isOnEncoder(maxViewfinderSize, newStream->width,
2061                             newStream->height)) {
2062                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2063                     } else {
2064                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2065                     }
2066                 }
2067                 if (isZsl) {
2068                     if (zslStream) {
2069                         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2070                                 (int32_t)zslStream->width;
2071                         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2072                                 (int32_t)zslStream->height;
2073                     } else {
2074                         LOGE("Error, No ZSL stream identified");
2075                         pthread_mutex_unlock(&mMutex);
2076                         return -EINVAL;
2077                     }
2078                 } else if (m_bIs4KVideo) {
2079                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2080                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2081                 } else if (bYuv888OverrideJpeg) {
2082                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2083                             (int32_t)largeYuv888Size.width;
2084                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2085                             (int32_t)largeYuv888Size.height;
2086                 }
2087                 break;
2088             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2089             case HAL_PIXEL_FORMAT_RAW16:
2090             case HAL_PIXEL_FORMAT_RAW10:
2091                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2092                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2093                 isRawStreamRequested = true;
2094                 break;
2095             default:
2096                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2097                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2098                 break;
2099             }
2100         }
2101 
2102         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2103                 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2104                 gCamCapability[mCameraId]->color_arrangement);
2105 
2106         if (newStream->priv == NULL) {
2107             //New stream, construct channel
2108             switch (newStream->stream_type) {
2109             case CAMERA3_STREAM_INPUT:
2110                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2111                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2112                 break;
2113             case CAMERA3_STREAM_BIDIRECTIONAL:
2114                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2115                     GRALLOC_USAGE_HW_CAMERA_WRITE;
2116                 break;
2117             case CAMERA3_STREAM_OUTPUT:
2118                 /* For video encoding stream, set read/write rarely
2119                  * flag so that they may be set to un-cached */
2120                 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2121                     newStream->usage |=
2122                          (GRALLOC_USAGE_SW_READ_RARELY |
2123                          GRALLOC_USAGE_SW_WRITE_RARELY |
2124                          GRALLOC_USAGE_HW_CAMERA_WRITE);
2125                 else if (IS_USAGE_ZSL(newStream->usage))
2126                 {
2127                     LOGD("ZSL usage flag skipping");
2128                 }
2129                 else if (newStream == zslStream
2130                         || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2131                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2132                 } else
2133                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2134                 break;
2135             default:
2136                 LOGE("Invalid stream_type %d", newStream->stream_type);
2137                 break;
2138             }
2139 
2140             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2141                     newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2142                 QCamera3ProcessingChannel *channel = NULL;
2143                 switch (newStream->format) {
2144                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2145                     if ((newStream->usage &
2146                             private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2147                             (streamList->operation_mode ==
2148                             CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2149                     ) {
2150                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2151                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
2152                                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2153                                 this,
2154                                 newStream,
2155                                 (cam_stream_type_t)
2156                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2157                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2158                                 mMetadataChannel,
2159                                 0); //heap buffers are not required for HFR video channel
2160                         if (channel == NULL) {
2161                             LOGE("allocation of channel failed");
2162                             pthread_mutex_unlock(&mMutex);
2163                             return -ENOMEM;
2164                         }
2165                         //channel->getNumBuffers() will return 0 here so use
2166                         //MAX_INFLIGH_HFR_REQUESTS
2167                         newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2168                         newStream->priv = channel;
2169                         LOGI("num video buffers in HFR mode: %d",
2170                                  MAX_INFLIGHT_HFR_REQUESTS);
2171                     } else {
2172                         /* Copy stream contents in HFR preview only case to create
2173                          * dummy batch channel so that sensor streaming is in
2174                          * HFR mode */
2175                         if (!m_bIsVideo && (streamList->operation_mode ==
2176                                 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2177                             mDummyBatchStream = *newStream;
2178                         }
2179                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2180                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
2181                                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2182                                 this,
2183                                 newStream,
2184                                 (cam_stream_type_t)
2185                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2186                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2187                                 mMetadataChannel,
2188                                 MAX_INFLIGHT_REQUESTS);
2189                         if (channel == NULL) {
2190                             LOGE("allocation of channel failed");
2191                             pthread_mutex_unlock(&mMutex);
2192                             return -ENOMEM;
2193                         }
2194                         newStream->max_buffers = MAX_INFLIGHT_60FPS_REQUESTS;
2195                         newStream->priv = channel;
2196                     }
2197                     break;
2198                 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2199                     channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2200                             mChannelHandle,
2201                             mCameraHandle->ops, captureResultCb,
2202                             setBufferErrorStatus, &padding_info,
2203                             this,
2204                             newStream,
2205                             (cam_stream_type_t)
2206                                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2207                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2208                             mMetadataChannel);
2209                     if (channel == NULL) {
2210                         LOGE("allocation of YUV channel failed");
2211                         pthread_mutex_unlock(&mMutex);
2212                         return -ENOMEM;
2213                     }
2214                     newStream->max_buffers = channel->getNumBuffers();
2215                     newStream->priv = channel;
2216                     break;
2217                 }
2218                 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2219                 case HAL_PIXEL_FORMAT_RAW16:
2220                 case HAL_PIXEL_FORMAT_RAW10:
2221                     mRawChannel = new QCamera3RawChannel(
2222                             mCameraHandle->camera_handle, mChannelHandle,
2223                             mCameraHandle->ops, captureResultCb,
2224                             setBufferErrorStatus, &padding_info,
2225                             this, newStream,
2226                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2227                             mMetadataChannel,
2228                             (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2229                     if (mRawChannel == NULL) {
2230                         LOGE("allocation of raw channel failed");
2231                         pthread_mutex_unlock(&mMutex);
2232                         return -ENOMEM;
2233                     }
2234                     newStream->max_buffers = mRawChannel->getNumBuffers();
2235                     newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2236                     break;
2237                 case HAL_PIXEL_FORMAT_BLOB:
2238                     // Max live snapshot inflight buffer is 1. This is to mitigate
2239                     // frame drop issues for video snapshot. The more buffers being
2240                     // allocated, the more frame drops there are.
2241                     mPictureChannel = new QCamera3PicChannel(
2242                             mCameraHandle->camera_handle, mChannelHandle,
2243                             mCameraHandle->ops, captureResultCb,
2244                             setBufferErrorStatus, &padding_info, this, newStream,
2245                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2246                             m_bIs4KVideo, isZsl, mMetadataChannel,
2247                             (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2248                     if (mPictureChannel == NULL) {
2249                         LOGE("allocation of channel failed");
2250                         pthread_mutex_unlock(&mMutex);
2251                         return -ENOMEM;
2252                     }
2253                     newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2254                     newStream->max_buffers = mPictureChannel->getNumBuffers();
2255                     mPictureChannel->overrideYuvSize(
2256                             mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2257                             mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2258                     break;
2259 
2260                 default:
2261                     LOGE("not a supported format 0x%x", newStream->format);
2262                     pthread_mutex_unlock(&mMutex);
2263                     return -EINVAL;
2264                 }
2265             } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2266                 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2267             } else {
2268                 LOGE("Error, Unknown stream type");
2269                 pthread_mutex_unlock(&mMutex);
2270                 return -EINVAL;
2271             }
2272 
2273             QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2274             if (channel != NULL && channel->isUBWCEnabled()) {
2275                 cam_format_t fmt = channel->getStreamDefaultFormat(
2276                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2277                         newStream->width, newStream->height);
2278                 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2279                     newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2280                 }
2281             }
2282 
2283             for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2284                     it != mStreamInfo.end(); it++) {
2285                 if ((*it)->stream == newStream) {
2286                     (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2287                     break;
2288                 }
2289             }
2290         } else {
2291             // Channel already exists for this stream
2292             // Do nothing for now
2293         }
2294         padding_info = gCamCapability[mCameraId]->padding_info;
2295 
2296         /* Do not add entries for input stream in metastream info
2297          * since there is no real stream associated with it
2298          */
2299         if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2300             mStreamConfigInfo.num_streams++;
2301     }
2302 
2303     //RAW DUMP channel
2304     if (mEnableRawDump && isRawStreamRequested == false){
2305         cam_dimension_t rawDumpSize;
2306         rawDumpSize = getMaxRawSize(mCameraId);
2307         cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2308         setPAAFSupport(rawDumpFeatureMask,
2309                 CAM_STREAM_TYPE_RAW,
2310                 gCamCapability[mCameraId]->color_arrangement);
2311         mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2312                                   mChannelHandle,
2313                                   mCameraHandle->ops,
2314                                   rawDumpSize,
2315                                   &padding_info,
2316                                   this, rawDumpFeatureMask);
2317         if (!mRawDumpChannel) {
2318             LOGE("Raw Dump channel cannot be created");
2319             pthread_mutex_unlock(&mMutex);
2320             return -ENOMEM;
2321         }
2322     }
2323 
2324 
2325     if (mAnalysisChannel) {
2326         cam_analysis_info_t analysisInfo;
2327         memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2328         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2329                 CAM_STREAM_TYPE_ANALYSIS;
2330         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2331                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2332         rc = mCommon.getAnalysisInfo(FALSE, TRUE,
2333                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2334                 &analysisInfo);
2335         if (rc != NO_ERROR) {
2336             LOGE("getAnalysisInfo failed, ret = %d", rc);
2337             pthread_mutex_unlock(&mMutex);
2338             return rc;
2339         }
2340         cam_color_filter_arrangement_t analysis_color_arrangement =
2341                 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2342                 CAM_FILTER_ARRANGEMENT_Y :
2343                 gCamCapability[mCameraId]->color_arrangement);
2344         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2345                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2346                 analysis_color_arrangement);
2347 
2348         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2349                 analysisInfo.analysis_max_res;
2350         mStreamConfigInfo.num_streams++;
2351     }
2352 
2353     if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2354         cam_analysis_info_t supportInfo;
2355         memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2356         cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2357         setPAAFSupport(callbackFeatureMask,
2358                 CAM_STREAM_TYPE_CALLBACK,
2359                 gCamCapability[mCameraId]->color_arrangement);
2360         rc = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
2361         if (rc != NO_ERROR) {
2362             LOGE("getAnalysisInfo failed, ret = %d", rc);
2363             pthread_mutex_unlock(&mMutex);
2364             return rc;
2365         }
2366         mSupportChannel = new QCamera3SupportChannel(
2367                 mCameraHandle->camera_handle,
2368                 mChannelHandle,
2369                 mCameraHandle->ops,
2370                 &gCamCapability[mCameraId]->padding_info,
2371                 callbackFeatureMask,
2372                 CAM_STREAM_TYPE_CALLBACK,
2373                 &QCamera3SupportChannel::kDim,
2374                 CAM_FORMAT_YUV_420_NV21,
2375                 supportInfo.hw_analysis_supported,
2376                 this, 0);
2377         if (!mSupportChannel) {
2378             LOGE("dummy channel cannot be created");
2379             pthread_mutex_unlock(&mMutex);
2380             return -ENOMEM;
2381         }
2382     }
2383 
2384     if (mSupportChannel) {
2385         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2386                 QCamera3SupportChannel::kDim;
2387         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2388                 CAM_STREAM_TYPE_CALLBACK;
2389         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2390                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2391         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2392                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2393                 gCamCapability[mCameraId]->color_arrangement);
2394         mStreamConfigInfo.num_streams++;
2395     }
2396 
2397     if (mRawDumpChannel) {
2398         cam_dimension_t rawSize;
2399         rawSize = getMaxRawSize(mCameraId);
2400         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2401                 rawSize;
2402         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2403                 CAM_STREAM_TYPE_RAW;
2404         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2405                 CAM_QCOM_FEATURE_NONE;
2406         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2407                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2408                 gCamCapability[mCameraId]->color_arrangement);
2409         mStreamConfigInfo.num_streams++;
2410     }
2411     /* In HFR mode, if video stream is not added, create a dummy channel so that
2412      * ISP can create a batch mode even for preview only case. This channel is
2413      * never 'start'ed (no stream-on), it is only 'initialized'  */
2414     if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2415             !m_bIsVideo) {
2416         cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2417         setPAAFSupport(dummyFeatureMask,
2418                 CAM_STREAM_TYPE_VIDEO,
2419                 gCamCapability[mCameraId]->color_arrangement);
2420         mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2421                 mChannelHandle,
2422                 mCameraHandle->ops, captureResultCb,
2423                 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2424                 this,
2425                 &mDummyBatchStream,
2426                 CAM_STREAM_TYPE_VIDEO,
2427                 dummyFeatureMask,
2428                 mMetadataChannel);
2429         if (NULL == mDummyBatchChannel) {
2430             LOGE("creation of mDummyBatchChannel failed."
2431                     "Preview will use non-hfr sensor mode ");
2432         }
2433     }
2434     if (mDummyBatchChannel) {
2435         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2436                 mDummyBatchStream.width;
2437         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2438                 mDummyBatchStream.height;
2439         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2440                 CAM_STREAM_TYPE_VIDEO;
2441         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2442                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2443         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2444                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2445                 gCamCapability[mCameraId]->color_arrangement);
2446         mStreamConfigInfo.num_streams++;
2447     }
2448 
2449     mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2450     mStreamConfigInfo.buffer_info.max_buffers =
2451             m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2452 
2453     /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2454     for (pendingRequestIterator i = mPendingRequestsList.begin();
2455             i != mPendingRequestsList.end();) {
2456         i = erasePendingRequest(i);
2457     }
2458     mPendingFrameDropList.clear();
2459     // Initialize/Reset the pending buffers list
2460     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2461         req.mPendingBufferList.clear();
2462     }
2463     mPendingBuffersMap.mPendingBuffersInRequest.clear();
2464 
2465     mPendingReprocessResultList.clear();
2466 
2467     mCurJpegMeta.clear();
2468     //Get min frame duration for this streams configuration
2469     deriveMinFrameDuration();
2470 
2471     // Update state
2472     mState = CONFIGURED;
2473 
2474     pthread_mutex_unlock(&mMutex);
2475 
2476     return rc;
2477 }
2478 
2479 /*===========================================================================
2480  * FUNCTION   : validateCaptureRequest
2481  *
2482  * DESCRIPTION: validate a capture request from camera service
2483  *
2484  * PARAMETERS :
2485  *   @request : request from framework to process
2486  *
2487  * RETURN     :
2488  *
2489  *==========================================================================*/
validateCaptureRequest(camera3_capture_request_t * request)2490 int QCamera3HardwareInterface::validateCaptureRequest(
2491                     camera3_capture_request_t *request)
2492 {
2493     ssize_t idx = 0;
2494     const camera3_stream_buffer_t *b;
2495     CameraMetadata meta;
2496 
2497     /* Sanity check the request */
2498     if (request == NULL) {
2499         LOGE("NULL capture request");
2500         return BAD_VALUE;
2501     }
2502 
2503     if ((request->settings == NULL) && (mState == CONFIGURED)) {
2504         /*settings cannot be null for the first request*/
2505         return BAD_VALUE;
2506     }
2507 
2508     uint32_t frameNumber = request->frame_number;
2509     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2510         LOGE("Request %d: No output buffers provided!",
2511                 __FUNCTION__, frameNumber);
2512         return BAD_VALUE;
2513     }
2514     if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2515         LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2516                  request->num_output_buffers, MAX_NUM_STREAMS);
2517         return BAD_VALUE;
2518     }
2519     if (request->input_buffer != NULL) {
2520         b = request->input_buffer;
2521         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2522             LOGE("Request %d: Buffer %ld: Status not OK!",
2523                      frameNumber, (long)idx);
2524             return BAD_VALUE;
2525         }
2526         if (b->release_fence != -1) {
2527             LOGE("Request %d: Buffer %ld: Has a release fence!",
2528                      frameNumber, (long)idx);
2529             return BAD_VALUE;
2530         }
2531         if (b->buffer == NULL) {
2532             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2533                      frameNumber, (long)idx);
2534             return BAD_VALUE;
2535         }
2536     }
2537 
2538     // Validate all buffers
2539     b = request->output_buffers;
2540     do {
2541         QCamera3ProcessingChannel *channel =
2542                 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2543         if (channel == NULL) {
2544             LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2545                      frameNumber, (long)idx);
2546             return BAD_VALUE;
2547         }
2548         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2549             LOGE("Request %d: Buffer %ld: Status not OK!",
2550                      frameNumber, (long)idx);
2551             return BAD_VALUE;
2552         }
2553         if (b->release_fence != -1) {
2554             LOGE("Request %d: Buffer %ld: Has a release fence!",
2555                      frameNumber, (long)idx);
2556             return BAD_VALUE;
2557         }
2558         if (b->buffer == NULL) {
2559             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2560                      frameNumber, (long)idx);
2561             return BAD_VALUE;
2562         }
2563         if (*(b->buffer) == NULL) {
2564             LOGE("Request %d: Buffer %ld: NULL private handle!",
2565                      frameNumber, (long)idx);
2566             return BAD_VALUE;
2567         }
2568         idx++;
2569         b = request->output_buffers + idx;
2570     } while (idx < (ssize_t)request->num_output_buffers);
2571 
2572     return NO_ERROR;
2573 }
2574 
2575 /*===========================================================================
2576  * FUNCTION   : deriveMinFrameDuration
2577  *
2578  * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2579  *              on currently configured streams.
2580  *
2581  * PARAMETERS : NONE
2582  *
2583  * RETURN     : NONE
2584  *
2585  *==========================================================================*/
deriveMinFrameDuration()2586 void QCamera3HardwareInterface::deriveMinFrameDuration()
2587 {
2588     int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2589 
2590     maxJpegDim = 0;
2591     maxProcessedDim = 0;
2592     maxRawDim = 0;
2593 
2594     // Figure out maximum jpeg, processed, and raw dimensions
2595     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2596         it != mStreamInfo.end(); it++) {
2597 
2598         // Input stream doesn't have valid stream_type
2599         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2600             continue;
2601 
2602         int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2603         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2604             if (dimension > maxJpegDim)
2605                 maxJpegDim = dimension;
2606         } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2607                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2608                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2609             if (dimension > maxRawDim)
2610                 maxRawDim = dimension;
2611         } else {
2612             if (dimension > maxProcessedDim)
2613                 maxProcessedDim = dimension;
2614         }
2615     }
2616 
2617     size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2618             MAX_SIZES_CNT);
2619 
2620     //Assume all jpeg dimensions are in processed dimensions.
2621     if (maxJpegDim > maxProcessedDim)
2622         maxProcessedDim = maxJpegDim;
2623     //Find the smallest raw dimension that is greater or equal to jpeg dimension
2624     if (maxProcessedDim > maxRawDim) {
2625         maxRawDim = INT32_MAX;
2626 
2627         for (size_t i = 0; i < count; i++) {
2628             int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2629                     gCamCapability[mCameraId]->raw_dim[i].height;
2630             if (dimension >= maxProcessedDim && dimension < maxRawDim)
2631                 maxRawDim = dimension;
2632         }
2633     }
2634 
2635     //Find minimum durations for processed, jpeg, and raw
2636     for (size_t i = 0; i < count; i++) {
2637         if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2638                 gCamCapability[mCameraId]->raw_dim[i].height) {
2639             mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2640             break;
2641         }
2642     }
2643     count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2644     for (size_t i = 0; i < count; i++) {
2645         if (maxProcessedDim ==
2646                 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2647                 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2648             mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2649             mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2650             break;
2651         }
2652     }
2653 }
2654 
2655 /*===========================================================================
2656  * FUNCTION   : getMinFrameDuration
2657  *
2658  * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
2659  *              and current request configuration.
2660  *
2661  * PARAMETERS : @request: request sent by the frameworks
2662  *
2663  * RETURN     : min frame duration for a particular request
2664  *
2665  *==========================================================================*/
getMinFrameDuration(const camera3_capture_request_t * request)2666 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2667 {
2668     bool hasJpegStream = false;
2669     bool hasRawStream = false;
2670     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2671         const camera3_stream_t *stream = request->output_buffers[i].stream;
2672         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2673             hasJpegStream = true;
2674         else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2675                 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2676                 stream->format == HAL_PIXEL_FORMAT_RAW16)
2677             hasRawStream = true;
2678     }
2679 
2680     if (!hasJpegStream)
2681         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2682     else
2683         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2684 }
2685 
2686 /*===========================================================================
2687  * FUNCTION   : handleBuffersDuringFlushLock
2688  *
2689  * DESCRIPTION: Account for buffers returned from back-end during flush
2690  *              This function is executed while mMutex is held by the caller.
2691  *
2692  * PARAMETERS :
2693  *   @buffer: image buffer for the callback
2694  *
2695  * RETURN     :
2696  *==========================================================================*/
handleBuffersDuringFlushLock(camera3_stream_buffer_t * buffer)2697 void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2698 {
2699     bool buffer_found = false;
2700     for (List<PendingBuffersInRequest>::iterator req =
2701             mPendingBuffersMap.mPendingBuffersInRequest.begin();
2702             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2703         for (List<PendingBufferInfo>::iterator i =
2704                 req->mPendingBufferList.begin();
2705                 i != req->mPendingBufferList.end(); i++) {
2706             if (i->buffer == buffer->buffer) {
2707                 mPendingBuffersMap.numPendingBufsAtFlush--;
2708                 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2709                     buffer->buffer, req->frame_number,
2710                     mPendingBuffersMap.numPendingBufsAtFlush);
2711                 buffer_found = true;
2712                 break;
2713             }
2714         }
2715         if (buffer_found) {
2716             break;
2717         }
2718     }
2719     if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2720         //signal the flush()
2721         LOGD("All buffers returned to HAL. Continue flush");
2722         pthread_cond_signal(&mBuffersCond);
2723     }
2724 }
2725 
2726 
2727 /*===========================================================================
2728  * FUNCTION   : handlePendingReprocResults
2729  *
2730  * DESCRIPTION: check and notify on any pending reprocess results
2731  *
2732  * PARAMETERS :
2733  *   @frame_number   : Pending request frame number
2734  *
2735  * RETURN     : int32_t type of status
2736  *              NO_ERROR  -- success
2737  *              non-zero failure code
2738  *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Scan the queued reprocess results for one matching this frame number.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notify message that was held back for this frame.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Find the matching pending request so its input buffer and
            // settings can be attached to the capture result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    // Build a single-buffer capture result. Note that
                    // output_buffers points into the list node at j, so j
                    // must not be erased before process_capture_result runs.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // The request is fully answered; remove it and stop the
                    // inner search (iterator k is invalid after erase).
                    erasePendingRequest(k);
                    break;
                }
            }
            // Drop the consumed reprocess-result entry and stop; at most one
            // entry matches a given frame number.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2777 
2778 /*===========================================================================
2779  * FUNCTION   : handleBatchMetadata
2780  *
2781  * DESCRIPTION: Handles metadata buffer callback in batch mode
2782  *
2783  * PARAMETERS : @metadata_buf: metadata buffer
2784  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2785  *                 the meta buf in this method
2786  *
2787  * RETURN     :
2788  *
2789  *==========================================================================*/
handleBatchMetadata(mm_camera_super_buf_t * metadata_buf,bool free_and_bufdone_meta_buf)2790 void QCamera3HardwareInterface::handleBatchMetadata(
2791         mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2792 {
2793     ATRACE_CALL();
2794 
2795     if (NULL == metadata_buf) {
2796         LOGE("metadata_buf is NULL");
2797         return;
2798     }
2799     /* In batch mode, the metdata will contain the frame number and timestamp of
2800      * the last frame in the batch. Eg: a batch containing buffers from request
2801      * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
2802      * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
2803      * multiple process_capture_results */
2804     metadata_buffer_t *metadata =
2805             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2806     int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2807     uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
2808     uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
2809     uint32_t frame_number = 0, urgent_frame_number = 0;
2810     int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2811     bool invalid_metadata = false;
2812     size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2813     size_t loopCount = 1;
2814 
2815     int32_t *p_frame_number_valid =
2816             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2817     uint32_t *p_frame_number =
2818             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2819     int64_t *p_capture_time =
2820             POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2821     int32_t *p_urgent_frame_number_valid =
2822             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2823     uint32_t *p_urgent_frame_number =
2824             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2825 
2826     if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
2827             (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
2828             (NULL == p_urgent_frame_number)) {
2829         LOGE("Invalid metadata");
2830         invalid_metadata = true;
2831     } else {
2832         frame_number_valid = *p_frame_number_valid;
2833         last_frame_number = *p_frame_number;
2834         last_frame_capture_time = *p_capture_time;
2835         urgent_frame_number_valid = *p_urgent_frame_number_valid;
2836         last_urgent_frame_number = *p_urgent_frame_number;
2837     }
2838 
2839     /* In batchmode, when no video buffers are requested, set_parms are sent
2840      * for every capture_request. The difference between consecutive urgent
2841      * frame numbers and frame numbers should be used to interpolate the
2842      * corresponding frame numbers and time stamps */
2843     pthread_mutex_lock(&mMutex);
2844     if (urgent_frame_number_valid) {
2845         ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
2846         if(idx < 0) {
2847             LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
2848                 last_urgent_frame_number);
2849             mState = ERROR;
2850             pthread_mutex_unlock(&mMutex);
2851             return;
2852         }
2853         first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
2854         urgentFrameNumDiff = last_urgent_frame_number + 1 -
2855                 first_urgent_frame_number;
2856 
2857         LOGH("urgent_frm: valid: %d frm_num: %d - %d",
2858                  urgent_frame_number_valid,
2859                 first_urgent_frame_number, last_urgent_frame_number);
2860     }
2861 
2862     if (frame_number_valid) {
2863         ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
2864         if(idx < 0) {
2865             LOGE("Invalid frame number received: %d. Irrecoverable error",
2866                 last_frame_number);
2867             mState = ERROR;
2868             pthread_mutex_unlock(&mMutex);
2869             return;
2870         }
2871         first_frame_number = mPendingBatchMap.valueAt(idx);
2872         frameNumDiff = last_frame_number + 1 -
2873                 first_frame_number;
2874         mPendingBatchMap.removeItem(last_frame_number);
2875 
2876         LOGH("frm: valid: %d frm_num: %d - %d",
2877                  frame_number_valid,
2878                 first_frame_number, last_frame_number);
2879 
2880     }
2881     pthread_mutex_unlock(&mMutex);
2882 
2883     if (urgent_frame_number_valid || frame_number_valid) {
2884         loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
2885         if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
2886             LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
2887                      urgentFrameNumDiff, last_urgent_frame_number);
2888         if (frameNumDiff > MAX_HFR_BATCH_SIZE)
2889             LOGE("frameNumDiff: %d frameNum: %d",
2890                      frameNumDiff, last_frame_number);
2891     }
2892 
2893     for (size_t i = 0; i < loopCount; i++) {
2894         /* handleMetadataWithLock is called even for invalid_metadata for
2895          * pipeline depth calculation */
2896         if (!invalid_metadata) {
2897             /* Infer frame number. Batch metadata contains frame number of the
2898              * last frame */
2899             if (urgent_frame_number_valid) {
2900                 if (i < urgentFrameNumDiff) {
2901                     urgent_frame_number =
2902                             first_urgent_frame_number + i;
2903                     LOGD("inferred urgent frame_number: %d",
2904                              urgent_frame_number);
2905                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2906                             CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
2907                 } else {
2908                     /* This is to handle when urgentFrameNumDiff < frameNumDiff */
2909                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2910                             CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
2911                 }
2912             }
2913 
2914             /* Infer frame number. Batch metadata contains frame number of the
2915              * last frame */
2916             if (frame_number_valid) {
2917                 if (i < frameNumDiff) {
2918                     frame_number = first_frame_number + i;
2919                     LOGD("inferred frame_number: %d", frame_number);
2920                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2921                             CAM_INTF_META_FRAME_NUMBER, frame_number);
2922                 } else {
2923                     /* This is to handle when urgentFrameNumDiff > frameNumDiff */
2924                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2925                              CAM_INTF_META_FRAME_NUMBER_VALID, 0);
2926                 }
2927             }
2928 
2929             if (last_frame_capture_time) {
2930                 //Infer timestamp
2931                 first_frame_capture_time = last_frame_capture_time -
2932                         (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
2933                 capture_time =
2934                         first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
2935                 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2936                         CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
2937                 LOGH("batch capture_time: %lld, capture_time: %lld",
2938                          last_frame_capture_time, capture_time);
2939             }
2940         }
2941         pthread_mutex_lock(&mMutex);
2942         handleMetadataWithLock(metadata_buf,
2943                 false /* free_and_bufdone_meta_buf */,
2944                 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
2945                 (i == frameNumDiff-1) /* last metadata in the batch metadata */);
2946         pthread_mutex_unlock(&mMutex);
2947     }
2948 
2949     /* BufDone metadata buffer */
2950     if (free_and_bufdone_meta_buf) {
2951         mMetadataChannel->bufDone(metadata_buf);
2952         free(metadata_buf);
2953     }
2954 }
2955 
notifyError(uint32_t frameNumber,camera3_error_msg_code_t errorCode)2956 void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2957         camera3_error_msg_code_t errorCode)
2958 {
2959     camera3_notify_msg_t notify_msg;
2960     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2961     notify_msg.type = CAMERA3_MSG_ERROR;
2962     notify_msg.message.error.error_code = errorCode;
2963     notify_msg.message.error.error_stream = NULL;
2964     notify_msg.message.error.frame_number = frameNumber;
2965     mCallbackOps->notify(mCallbackOps, &notify_msg);
2966 
2967     return;
2968 }
2969 /*===========================================================================
2970  * FUNCTION   : handleMetadataWithLock
2971  *
2972  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2973  *
2974  * PARAMETERS : @metadata_buf: metadata buffer
2975  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2976  *                 the meta buf in this method
2977  *              @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
2978  *                  last urgent metadata in a batch. Always true for non-batch mode
2979  *              @lastMetadataInBatch: Boolean to indicate whether this is the
2980  *                  last metadata in a batch. Always true for non-batch mode
2981  *
2982  * RETURN     :
2983  *
2984  *==========================================================================*/
handleMetadataWithLock(mm_camera_super_buf_t * metadata_buf,bool free_and_bufdone_meta_buf,bool lastUrgentMetadataInBatch,bool lastMetadataInBatch)2985 void QCamera3HardwareInterface::handleMetadataWithLock(
2986     mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
2987     bool lastUrgentMetadataInBatch, bool lastMetadataInBatch)
2988 {
2989     ATRACE_CALL();
2990     if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
2991         //during flush do not send metadata from this thread
2992         LOGD("not sending metadata during flush or when mState is error");
2993         if (free_and_bufdone_meta_buf) {
2994             mMetadataChannel->bufDone(metadata_buf);
2995             free(metadata_buf);
2996         }
2997         return;
2998     }
2999 
3000     //not in flush
3001     metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3002     int32_t frame_number_valid, urgent_frame_number_valid;
3003     uint32_t frame_number, urgent_frame_number;
3004     int64_t capture_time, capture_time_av;
3005     nsecs_t currentSysTime;
3006 
3007     int32_t *p_frame_number_valid =
3008             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3009     uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3010     int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3011     int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
3012     int32_t *p_urgent_frame_number_valid =
3013             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3014     uint32_t *p_urgent_frame_number =
3015             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3016     IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3017             metadata) {
3018         LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3019                  *p_frame_number_valid, *p_frame_number);
3020     }
3021 
3022     if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3023             (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3024         LOGE("Invalid metadata");
3025         if (free_and_bufdone_meta_buf) {
3026             mMetadataChannel->bufDone(metadata_buf);
3027             free(metadata_buf);
3028         }
3029         goto done_metadata;
3030     }
3031     frame_number_valid =        *p_frame_number_valid;
3032     frame_number =              *p_frame_number;
3033     capture_time =              *p_capture_time;
3034     capture_time_av =           *p_capture_time_av;
3035     urgent_frame_number_valid = *p_urgent_frame_number_valid;
3036     urgent_frame_number =       *p_urgent_frame_number;
3037     currentSysTime =            systemTime(CLOCK_MONOTONIC);
3038 
3039     if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3040         const int tries = 3;
3041         nsecs_t bestGap, measured;
3042         for (int i = 0; i < tries; ++i) {
3043             const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3044             const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3045             const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3046             const nsecs_t gap = tmono2 - tmono;
3047             if (i == 0 || gap < bestGap) {
3048                 bestGap = gap;
3049                 measured = tbase - ((tmono + tmono2) >> 1);
3050             }
3051         }
3052         capture_time -= measured;
3053     }
3054 
3055     // Detect if buffers from any requests are overdue
3056     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3057         if ( (currentSysTime - req.timestamp) >
3058             s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
3059             for (auto &missed : req.mPendingBufferList) {
3060                 assert(missed.stream->priv);
3061                 if (missed.stream->priv) {
3062                     QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3063                     assert(ch->mStreams[0]);
3064                     if (ch->mStreams[0]) {
3065                         LOGW("Missing: frame = %d, buffer = %p,"
3066                             "stream type = %d, stream format = %d",
3067                             req.frame_number, missed.buffer,
3068                             ch->mStreams[0]->getMyType(), missed.stream->format);
3069                         ch->timeoutFrame(req.frame_number);
3070                     }
3071                 }
3072             }
3073         }
3074     }
3075     //Partial result on process_capture_result for timestamp
3076     if (urgent_frame_number_valid) {
3077         LOGD("valid urgent frame_number = %u, capture_time = %lld",
3078            urgent_frame_number, capture_time);
3079 
3080         //Recieved an urgent Frame Number, handle it
3081         //using partial results
3082         for (pendingRequestIterator i =
3083                 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3084             LOGD("Iterator Frame = %d urgent frame = %d",
3085                  i->frame_number, urgent_frame_number);
3086 
3087             if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3088                 (i->partial_result_cnt == 0)) {
3089                 LOGE("Error: HAL missed urgent metadata for frame number %d",
3090                          i->frame_number);
3091                 i->partial_result_cnt++;
3092             }
3093 
3094             if (i->frame_number == urgent_frame_number &&
3095                      i->bUrgentReceived == 0) {
3096 
3097                 camera3_capture_result_t result;
3098                 memset(&result, 0, sizeof(camera3_capture_result_t));
3099 
3100                 i->partial_result_cnt++;
3101                 i->bUrgentReceived = 1;
3102                 // Extract 3A metadata
3103                 result.result = translateCbUrgentMetadataToResultMetadata(
3104                         metadata, lastUrgentMetadataInBatch);
3105                 // Populate metadata result
3106                 result.frame_number = urgent_frame_number;
3107                 result.num_output_buffers = 0;
3108                 result.output_buffers = NULL;
3109                 result.partial_result = i->partial_result_cnt;
3110 
3111                 mCallbackOps->process_capture_result(mCallbackOps, &result);
3112                 LOGD("urgent frame_number = %u, capture_time = %lld",
3113                       result.frame_number, capture_time);
3114                 free_camera_metadata((camera_metadata_t *)result.result);
3115                 break;
3116             }
3117         }
3118     }
3119 
3120     if (!frame_number_valid) {
3121         LOGD("Not a valid normal frame number, used as SOF only");
3122         if (free_and_bufdone_meta_buf) {
3123             mMetadataChannel->bufDone(metadata_buf);
3124             free(metadata_buf);
3125         }
3126         goto done_metadata;
3127     }
3128     LOGH("valid frame_number = %u, capture_time = %lld",
3129             frame_number, capture_time);
3130 
3131     for (pendingRequestIterator i = mPendingRequestsList.begin();
3132             i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
3133         // Flush out all entries with less or equal frame numbers.
3134 
3135         camera3_capture_result_t result;
3136         memset(&result, 0, sizeof(camera3_capture_result_t));
3137 
3138         LOGD("frame_number in the list is %u", i->frame_number);
3139         i->partial_result_cnt++;
3140         result.partial_result = i->partial_result_cnt;
3141 
3142         // Check whether any stream buffer corresponding to this is dropped or not
3143         // If dropped, then send the ERROR_BUFFER for the corresponding stream
3144         // The API does not expect a blob buffer to be dropped
3145         if (p_cam_frame_drop) {
3146             /* Clear notify_msg structure */
3147             camera3_notify_msg_t notify_msg;
3148             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3149             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3150                     j != i->buffers.end(); j++) {
3151                 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
3152                 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3153                 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
3154                     if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
3155                         // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3156                         LOGE("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
3157                                 __func__, i->frame_number, streamID, j->stream->format);
3158                         notify_msg.type = CAMERA3_MSG_ERROR;
3159                         notify_msg.message.error.frame_number = i->frame_number;
3160                         notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
3161                         notify_msg.message.error.error_stream = j->stream;
3162                         mCallbackOps->notify(mCallbackOps, &notify_msg);
3163                         LOGE("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
3164                                 __func__, i->frame_number, streamID, j->stream->format);
3165                         PendingFrameDropInfo PendingFrameDrop;
3166                         PendingFrameDrop.frame_number=i->frame_number;
3167                         PendingFrameDrop.stream_ID = streamID;
3168                         // Add the Frame drop info to mPendingFrameDropList
3169                         mPendingFrameDropList.push_back(PendingFrameDrop);
3170                    }
3171                }
3172             }
3173         }
3174 
3175         // Send empty metadata with already filled buffers for dropped metadata
3176         // and send valid metadata with already filled buffers for current metadata
3177         /* we could hit this case when we either
3178          * 1. have a pending reprocess request or
3179          * 2. miss a metadata buffer callback */
3180         if (i->frame_number < frame_number) {
3181             if (i->input_buffer) {
3182                 /* this will be handled in handleInputBufferWithLock */
3183                 i++;
3184                 continue;
3185             } else {
3186 
3187                 CameraMetadata dummyMetadata;
3188                 dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
3189                 result.result = dummyMetadata.release();
3190 
3191                 notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
3192 
3193                 // partial_result should be PARTIAL_RESULT_CNT in case of
3194                 // ERROR_RESULT.
3195                 i->partial_result_cnt = PARTIAL_RESULT_COUNT;
3196                 result.partial_result = PARTIAL_RESULT_COUNT;
3197             }
3198         } else {
3199             mPendingLiveRequest--;
3200             /* Clear notify_msg structure */
3201             camera3_notify_msg_t notify_msg;
3202             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3203 
3204             // Send shutter notify to frameworks
3205             notify_msg.type = CAMERA3_MSG_SHUTTER;
3206             notify_msg.message.shutter.frame_number = i->frame_number;
3207             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3208             mCallbackOps->notify(mCallbackOps, &notify_msg);
3209 
3210             i->timestamp = capture_time;
3211 
3212             /* Set the timestamp in display metadata so that clients aware of
3213                private_handle such as VT can use this un-modified timestamps.
3214                Camera framework is unaware of this timestamp and cannot change this */
3215             updateTimeStampInPendingBuffers(i->frame_number, capture_time_av);
3216 
3217             // Find channel requiring metadata, meaning internal offline postprocess
3218             // is needed.
3219             //TODO: for now, we don't support two streams requiring metadata at the same time.
3220             // (because we are not making copies, and metadata buffer is not reference counted.
3221             bool internalPproc = false;
3222             for (pendingBufferIterator iter = i->buffers.begin();
3223                     iter != i->buffers.end(); iter++) {
3224                 if (iter->need_metadata) {
3225                     internalPproc = true;
3226                     QCamera3ProcessingChannel *channel =
3227                             (QCamera3ProcessingChannel *)iter->stream->priv;
3228                     channel->queueReprocMetadata(metadata_buf);
3229                     break;
3230                 }
3231             }
3232 
3233             // atrace_begin(ATRACE_TAG_ALWAYS, "translateFromHalMetadata");
3234             result.result = translateFromHalMetadata(metadata,
3235                     i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
3236                     i->capture_intent, i->hybrid_ae_enable,
3237                      /* DevCamDebug metadata translateFromHalMetadata function call*/
3238                     i->DevCamDebug_meta_enable,
3239                     /* DevCamDebug metadata end */
3240                     internalPproc, i->fwkCacMode,
3241                     lastMetadataInBatch);
3242             // atrace_end(ATRACE_TAG_ALWAYS);
3243 
3244             saveExifParams(metadata);
3245 
3246             if (i->blob_request) {
3247                 {
3248                     //Dump tuning metadata if enabled and available
3249                     char prop[PROPERTY_VALUE_MAX];
3250                     memset(prop, 0, sizeof(prop));
3251                     property_get("persist.camera.dumpmetadata", prop, "0");
3252                     int32_t enabled = atoi(prop);
3253                     if (enabled && metadata->is_tuning_params_valid) {
3254                         dumpMetadataToFile(metadata->tuning_params,
3255                                mMetaFrameCount,
3256                                enabled,
3257                                "Snapshot",
3258                                frame_number);
3259                     }
3260                 }
3261             }
3262 
3263             if (!internalPproc) {
3264                 LOGD("couldn't find need_metadata for this metadata");
3265                 // Return metadata buffer
3266                 if (free_and_bufdone_meta_buf) {
3267                     mMetadataChannel->bufDone(metadata_buf);
3268                     free(metadata_buf);
3269                 }
3270             }
3271         }
3272         if (!result.result) {
3273             LOGE("metadata is NULL");
3274         }
3275         result.frame_number = i->frame_number;
3276         result.input_buffer = i->input_buffer;
3277         result.num_output_buffers = 0;
3278         result.output_buffers = NULL;
3279         for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3280                     j != i->buffers.end(); j++) {
3281             if (j->buffer) {
3282                 result.num_output_buffers++;
3283             }
3284         }
3285 
3286         updateFpsInPreviewBuffer(metadata, i->frame_number);
3287 
3288         if (result.num_output_buffers > 0) {
3289             camera3_stream_buffer_t *result_buffers =
3290                 new camera3_stream_buffer_t[result.num_output_buffers];
3291             if (result_buffers != NULL) {
3292                 size_t result_buffers_idx = 0;
3293                 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3294                         j != i->buffers.end(); j++) {
3295                     if (j->buffer) {
3296                         for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3297                                 m != mPendingFrameDropList.end(); m++) {
3298                             QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
3299                             uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3300                             if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
3301                                 j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3302                                 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
3303                                         frame_number, streamID);
3304                                 m = mPendingFrameDropList.erase(m);
3305                                 break;
3306                             }
3307                         }
3308                         j->buffer->status |= mPendingBuffersMap.getBufErrStatus(j->buffer->buffer);
3309                         mPendingBuffersMap.removeBuf(j->buffer->buffer);
3310                         result_buffers[result_buffers_idx++] = *(j->buffer);
3311                         free(j->buffer);
3312                         j->buffer = NULL;
3313                     }
3314                 }
3315 
3316                 result.output_buffers = result_buffers;
3317                 mCallbackOps->process_capture_result(mCallbackOps, &result);
3318                 LOGD("meta frame_number = %u, capture_time = %lld",
3319                         result.frame_number, i->timestamp);
3320                 free_camera_metadata((camera_metadata_t *)result.result);
3321                 delete[] result_buffers;
3322             }else {
3323                 LOGE("Fatal error: out of memory");
3324             }
3325         } else {
3326             mCallbackOps->process_capture_result(mCallbackOps, &result);
3327             LOGD("meta frame_number = %u, capture_time = %lld",
3328                     result.frame_number, i->timestamp);
3329             free_camera_metadata((camera_metadata_t *)result.result);
3330         }
3331 
3332         i = erasePendingRequest(i);
3333 
3334         if (!mPendingReprocessResultList.empty()) {
3335             handlePendingReprocResults(frame_number + 1);
3336         }
3337     }
3338 
3339 done_metadata:
3340     for (pendingRequestIterator i = mPendingRequestsList.begin();
3341             i != mPendingRequestsList.end() ;i++) {
3342         i->pipeline_depth++;
3343     }
3344     LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3345     unblockRequestIfNecessary();
3346 }
3347 
3348 /*===========================================================================
3349  * FUNCTION   : hdrPlusPerfLock
3350  *
3351  * DESCRIPTION: perf lock for HDR+ using custom intent
3352  *
3353  * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3354  *
3355  * RETURN     : None
3356  *
3357  *==========================================================================*/
hdrPlusPerfLock(mm_camera_super_buf_t * metadata_buf)3358 void QCamera3HardwareInterface::hdrPlusPerfLock(
3359         mm_camera_super_buf_t *metadata_buf)
3360 {
3361     if (NULL == metadata_buf) {
3362         LOGE("metadata_buf is NULL");
3363         return;
3364     }
3365     metadata_buffer_t *metadata =
3366             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3367     int32_t *p_frame_number_valid =
3368             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3369     uint32_t *p_frame_number =
3370             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3371 
3372     if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3373         LOGE("%s: Invalid metadata", __func__);
3374         return;
3375     }
3376 
3377     //acquire perf lock for 5 sec after the last HDR frame is captured
3378     if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3379         if ((p_frame_number != NULL) &&
3380                 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
3381             m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
3382         }
3383     }
3384 
3385     //release lock after perf lock timer is expired. If lock is already released,
3386     //isTimerReset returns false
3387     if (m_perfLock.isTimerReset()) {
3388         mLastCustIntentFrmNum = -1;
3389         m_perfLock.lock_rel_timed();
3390     }
3391 }
3392 
3393 /*===========================================================================
3394  * FUNCTION   : handleInputBufferWithLock
3395  *
3396  * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3397  *
3398  * PARAMETERS : @frame_number: frame number of the input buffer
3399  *
3400  * RETURN     :
3401  *
3402  *==========================================================================*/
handleInputBufferWithLock(uint32_t frame_number)3403 void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3404 {
3405     ATRACE_CALL();
3406     pendingRequestIterator i = mPendingRequestsList.begin();
3407     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3408         i++;
3409     }
3410     if (i != mPendingRequestsList.end() && i->input_buffer) {
3411         //found the right request
3412         if (!i->shutter_notified) {
3413             CameraMetadata settings;
3414             camera3_notify_msg_t notify_msg;
3415             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3416             nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3417             if(i->settings) {
3418                 settings = i->settings;
3419                 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3420                     capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3421                 } else {
3422                     LOGE("No timestamp in input settings! Using current one.");
3423                 }
3424             } else {
3425                 LOGE("Input settings missing!");
3426             }
3427 
3428             notify_msg.type = CAMERA3_MSG_SHUTTER;
3429             notify_msg.message.shutter.frame_number = frame_number;
3430             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3431             mCallbackOps->notify(mCallbackOps, &notify_msg);
3432             i->shutter_notified = true;
3433             LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3434                         i->frame_number, notify_msg.message.shutter.timestamp);
3435         }
3436 
3437         if (i->input_buffer->release_fence != -1) {
3438            int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3439            close(i->input_buffer->release_fence);
3440            if (rc != OK) {
3441                LOGE("input buffer sync wait failed %d", rc);
3442            }
3443         }
3444 
3445         camera3_capture_result result;
3446         memset(&result, 0, sizeof(camera3_capture_result));
3447         result.frame_number = frame_number;
3448         result.result = i->settings;
3449         result.input_buffer = i->input_buffer;
3450         result.partial_result = PARTIAL_RESULT_COUNT;
3451 
3452         mCallbackOps->process_capture_result(mCallbackOps, &result);
3453         LOGD("Input request metadata and input buffer frame_number = %u",
3454                         i->frame_number);
3455         i = erasePendingRequest(i);
3456     } else {
3457         LOGE("Could not find input request for frame number %d", frame_number);
3458     }
3459 }
3460 
3461 /*===========================================================================
3462  * FUNCTION   : handleBufferWithLock
3463  *
3464  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3465  *
3466  * PARAMETERS : @buffer: image buffer for the callback
3467  *              @frame_number: frame number of the image buffer
3468  *
3469  * RETURN     :
3470  *
3471  *==========================================================================*/
handleBufferWithLock(camera3_stream_buffer_t * buffer,uint32_t frame_number)3472 void QCamera3HardwareInterface::handleBufferWithLock(
3473     camera3_stream_buffer_t *buffer, uint32_t frame_number)
3474 {
3475     ATRACE_CALL();
3476     /* Nothing to be done during error state */
3477     if ((ERROR == mState) || (DEINIT == mState)) {
3478         return;
3479     }
3480     if (mFlushPerf) {
3481         handleBuffersDuringFlushLock(buffer);
3482         return;
3483     }
3484     //not in flush
3485     // If the frame number doesn't exist in the pending request list,
3486     // directly send the buffer to the frameworks, and update pending buffers map
3487     // Otherwise, book-keep the buffer.
3488     pendingRequestIterator i = mPendingRequestsList.begin();
3489     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3490         i++;
3491     }
3492     if (i == mPendingRequestsList.end()) {
3493         // Verify all pending requests frame_numbers are greater
3494         for (pendingRequestIterator j = mPendingRequestsList.begin();
3495                 j != mPendingRequestsList.end(); j++) {
3496             if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3497                 LOGW("Error: pending live frame number %d is smaller than %d",
3498                          j->frame_number, frame_number);
3499             }
3500         }
3501         camera3_capture_result_t result;
3502         memset(&result, 0, sizeof(camera3_capture_result_t));
3503         result.result = NULL;
3504         result.frame_number = frame_number;
3505         result.num_output_buffers = 1;
3506         result.partial_result = 0;
3507         for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3508                 m != mPendingFrameDropList.end(); m++) {
3509             QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3510             uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3511             if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3512                 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3513                 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3514                          frame_number, streamID);
3515                 m = mPendingFrameDropList.erase(m);
3516                 break;
3517             }
3518         }
3519         buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
3520         result.output_buffers = buffer;
3521         LOGH("result frame_number = %d, buffer = %p",
3522                  frame_number, buffer->buffer);
3523 
3524         mPendingBuffersMap.removeBuf(buffer->buffer);
3525 
3526         mCallbackOps->process_capture_result(mCallbackOps, &result);
3527     } else {
3528         if (i->input_buffer) {
3529             CameraMetadata settings;
3530             camera3_notify_msg_t notify_msg;
3531             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3532             nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3533             if(i->settings) {
3534                 settings = i->settings;
3535                 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3536                     capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3537                 } else {
3538                     LOGW("No timestamp in input settings! Using current one.");
3539                 }
3540             } else {
3541                 LOGE("Input settings missing!");
3542             }
3543 
3544             notify_msg.type = CAMERA3_MSG_SHUTTER;
3545             notify_msg.message.shutter.frame_number = frame_number;
3546             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3547 
3548             if (i->input_buffer->release_fence != -1) {
3549                int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3550                close(i->input_buffer->release_fence);
3551                if (rc != OK) {
3552                    LOGE("input buffer sync wait failed %d", rc);
3553                }
3554             }
3555             buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
3556             mPendingBuffersMap.removeBuf(buffer->buffer);
3557 
3558             bool notifyNow = true;
3559             for (pendingRequestIterator j = mPendingRequestsList.begin();
3560                     j != mPendingRequestsList.end(); j++) {
3561                 if (j->frame_number < frame_number) {
3562                     notifyNow = false;
3563                     break;
3564                 }
3565             }
3566 
3567             if (notifyNow) {
3568                 camera3_capture_result result;
3569                 memset(&result, 0, sizeof(camera3_capture_result));
3570                 result.frame_number = frame_number;
3571                 result.result = i->settings;
3572                 result.input_buffer = i->input_buffer;
3573                 result.num_output_buffers = 1;
3574                 result.output_buffers = buffer;
3575                 result.partial_result = PARTIAL_RESULT_COUNT;
3576 
3577                 mCallbackOps->notify(mCallbackOps, &notify_msg);
3578                 mCallbackOps->process_capture_result(mCallbackOps, &result);
3579                 LOGD("Notify reprocess now %d!", frame_number);
3580                 i = erasePendingRequest(i);
3581             } else {
3582                 // Cache reprocess result for later
3583                 PendingReprocessResult pendingResult;
3584                 memset(&pendingResult, 0, sizeof(PendingReprocessResult));
3585                 pendingResult.notify_msg = notify_msg;
3586                 pendingResult.buffer = *buffer;
3587                 pendingResult.frame_number = frame_number;
3588                 mPendingReprocessResultList.push_back(pendingResult);
3589                 LOGD("Cache reprocess result %d!", frame_number);
3590             }
3591         } else {
3592             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3593                 j != i->buffers.end(); j++) {
3594                 if (j->stream == buffer->stream) {
3595                     if (j->buffer != NULL) {
3596                         LOGE("Error: buffer is already set");
3597                     } else {
3598                         j->buffer = (camera3_stream_buffer_t *)malloc(
3599                             sizeof(camera3_stream_buffer_t));
3600                         *(j->buffer) = *buffer;
3601                         LOGH("cache buffer %p at result frame_number %u",
3602                              buffer->buffer, frame_number);
3603                     }
3604                 }
3605             }
3606         }
3607     }
3608 }
3609 
3610 /*===========================================================================
3611  * FUNCTION   : unblockRequestIfNecessary
3612  *
3613  * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3614  *              that mMutex is held when this function is called.
3615  *
3616  * PARAMETERS :
3617  *
3618  * RETURN     :
3619  *
3620  *==========================================================================*/
unblockRequestIfNecessary()3621 void QCamera3HardwareInterface::unblockRequestIfNecessary()
3622 {
3623    // Unblock process_capture_request
3624    pthread_cond_signal(&mRequestCond);
3625 }
3626 
3627 
3628 /*===========================================================================
3629  * FUNCTION   : processCaptureRequest
3630  *
3631  * DESCRIPTION: process a capture request from camera service
3632  *
3633  * PARAMETERS :
3634  *   @request : request from framework to process
3635  *
3636  * RETURN     :
3637  *
3638  *==========================================================================*/
processCaptureRequest(camera3_capture_request_t * request)3639 int QCamera3HardwareInterface::processCaptureRequest(
3640                     camera3_capture_request_t *request)
3641 {
3642     ATRACE_CALL();
3643     int rc = NO_ERROR;
3644     int32_t request_id;
3645     CameraMetadata meta;
3646     bool isVidBufRequested = false;
3647     camera3_stream_buffer_t *pInputBuffer = NULL;
3648 
3649     pthread_mutex_lock(&mMutex);
3650 
3651     // Validate current state
3652     switch (mState) {
3653         case CONFIGURED:
3654         case STARTED:
3655             /* valid state */
3656             break;
3657 
3658         case ERROR:
3659             pthread_mutex_unlock(&mMutex);
3660             handleCameraDeviceError();
3661             return -ENODEV;
3662 
3663         default:
3664             LOGE("Invalid state %d", mState);
3665             pthread_mutex_unlock(&mMutex);
3666             return -ENODEV;
3667     }
3668 
3669     rc = validateCaptureRequest(request);
3670     if (rc != NO_ERROR) {
3671         LOGE("incoming request is not valid");
3672         pthread_mutex_unlock(&mMutex);
3673         return rc;
3674     }
3675 
3676     meta = request->settings;
3677 
3678     // For first capture request, send capture intent, and
3679     // stream on all streams
3680     if (mState == CONFIGURED) {
3681         // send an unconfigure to the backend so that the isp
3682         // resources are deallocated
3683         if (!mFirstConfiguration) {
3684             cam_stream_size_info_t stream_config_info;
3685             int32_t hal_version = CAM_HAL_V3;
3686             memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3687             stream_config_info.buffer_info.min_buffers =
3688                     MIN_INFLIGHT_REQUESTS;
3689             stream_config_info.buffer_info.max_buffers =
3690                     m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3691             clear_metadata_buffer(mParameters);
3692             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3693                     CAM_INTF_PARM_HAL_VERSION, hal_version);
3694             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3695                     CAM_INTF_META_STREAM_INFO, stream_config_info);
3696             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3697                     mParameters);
3698             if (rc < 0) {
3699                 LOGE("set_parms for unconfigure failed");
3700                 pthread_mutex_unlock(&mMutex);
3701                 return rc;
3702             }
3703         }
3704         m_perfLock.lock_acq();
3705         /* get eis information for stream configuration */
3706         cam_is_type_t is_type;
3707         char is_type_value[PROPERTY_VALUE_MAX];
3708         property_get("persist.camera.is_type", is_type_value, "0");
3709         is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3710 
3711         if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3712             int32_t hal_version = CAM_HAL_V3;
3713             uint8_t captureIntent =
3714                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3715             mCaptureIntent = captureIntent;
3716             clear_metadata_buffer(mParameters);
3717             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3718             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3719         }
3720 
3721         //If EIS is enabled, turn it on for video
3722         bool setEis = m_bEisEnable && m_bEisSupportedSize && !meta.exists(QCAMERA3_USE_AV_TIMER);
3723         int32_t vsMode;
3724         vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3725         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3726             rc = BAD_VALUE;
3727         }
3728 
3729         //IS type will be 0 unless EIS is supported. If EIS is supported
3730         //it could either be 1 or 4 depending on the stream and video size
3731         if (setEis) {
3732             if (!m_bEisSupportedSize) {
3733                 is_type = IS_TYPE_DIS;
3734             } else {
3735                 is_type = IS_TYPE_EIS_2_0;
3736             }
3737             mStreamConfigInfo.is_type = is_type;
3738         } else {
3739             mStreamConfigInfo.is_type = IS_TYPE_NONE;
3740         }
3741 
3742         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3743                 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3744         int32_t tintless_value = 1;
3745         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3746                 CAM_INTF_PARM_TINTLESS, tintless_value);
3747         //Disable CDS for HFR mode or if DIS/EIS is on.
3748         //CDS is a session parameter in the backend/ISP, so need to be set/reset
3749         //after every configure_stream
3750         if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3751                 (m_bIsVideo)) {
3752             int32_t cds = CAM_CDS_MODE_OFF;
3753             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3754                     CAM_INTF_PARM_CDS_MODE, cds))
3755                 LOGE("Failed to disable CDS for HFR mode");
3756 
3757         }
3758 
3759         if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
3760             uint8_t* use_av_timer = NULL;
3761 
3762             if (m_debug_avtimer){
3763                 use_av_timer = &m_debug_avtimer;
3764             }
3765             else{
3766                 use_av_timer =
3767                     meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
3768             }
3769 
3770             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
3771                 rc = BAD_VALUE;
3772             }
3773         }
3774 
3775         setMobicat();
3776 
3777         /* Set fps and hfr mode while sending meta stream info so that sensor
3778          * can configure appropriate streaming mode */
3779         mHFRVideoFps = DEFAULT_VIDEO_FPS;
3780         mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
3781         mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3782         if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3783             rc = setHalFpsRange(meta, mParameters);
3784             if (rc == NO_ERROR) {
3785                 int32_t max_fps =
3786                     (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3787                 if (mBatchSize) {
3788                     /* For HFR, more buffers are dequeued upfront to improve the performance */
3789                     mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3790                     mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3791                 } else if (max_fps == 60) {
3792                     /* for 60 fps usecas increase inflight requests */
3793                     mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
3794                     mMaxInFlightRequests = MAX_INFLIGHT_60FPS_REQUESTS;
3795                 } else if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
3796                     /* for non 60 fps video use cases, set min = max inflight requests to
3797                     avoid frame drops due to degraded system performance */
3798                     mMinInFlightRequests = MAX_INFLIGHT_REQUESTS;
3799                 }
3800             }
3801             else {
3802                 LOGE("setHalFpsRange failed");
3803             }
3804         }
3805         if (meta.exists(ANDROID_CONTROL_MODE)) {
3806             uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3807             rc = extractSceneMode(meta, metaMode, mParameters);
3808             if (rc != NO_ERROR) {
3809                 LOGE("extractSceneMode failed");
3810             }
3811         }
3812         memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
3813 
3814 
3815         //TODO: validate the arguments, HSV scenemode should have only the
3816         //advertised fps ranges
3817 
3818         /*set the capture intent, hal version, tintless, stream info,
3819          *and disenable parameters to the backend*/
3820         LOGD("set_parms META_STREAM_INFO " );
3821         for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3822             LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
3823                     "Format:%d",
3824                     mStreamConfigInfo.type[i],
3825                     mStreamConfigInfo.stream_sizes[i].width,
3826                     mStreamConfigInfo.stream_sizes[i].height,
3827                     mStreamConfigInfo.postprocess_mask[i],
3828                     mStreamConfigInfo.format[i]);
3829         }
3830 
3831         rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3832                     mParameters);
3833         if (rc < 0) {
3834             LOGE("set_parms failed for hal version, stream info");
3835         }
3836 
3837         cam_dimension_t sensor_dim;
3838         memset(&sensor_dim, 0, sizeof(sensor_dim));
3839         rc = getSensorOutputSize(sensor_dim);
3840         if (rc != NO_ERROR) {
3841             LOGE("Failed to get sensor output size");
3842             pthread_mutex_unlock(&mMutex);
3843             goto error_exit;
3844         }
3845 
3846         mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3847                 gCamCapability[mCameraId]->active_array_size.height,
3848                 sensor_dim.width, sensor_dim.height);
3849 
3850         /* Set batchmode before initializing channel. Since registerBuffer
3851          * internally initializes some of the channels, better set batchmode
3852          * even before first register buffer */
3853         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3854             it != mStreamInfo.end(); it++) {
3855             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3856             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3857                     && mBatchSize) {
3858                 rc = channel->setBatchSize(mBatchSize);
3859                 //Disable per frame map unmap for HFR/batchmode case
3860                 rc |= channel->setPerFrameMapUnmap(false);
3861                 if (NO_ERROR != rc) {
3862                     LOGE("Channel init failed %d", rc);
3863                     pthread_mutex_unlock(&mMutex);
3864                     goto error_exit;
3865                 }
3866             }
3867         }
3868 
3869         //First initialize all streams
3870         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3871             it != mStreamInfo.end(); it++) {
3872             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3873             if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3874                ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3875                setEis)
3876                 rc = channel->initialize(is_type);
3877             else {
3878                 rc = channel->initialize(IS_TYPE_NONE);
3879             }
3880             if (NO_ERROR != rc) {
3881                 LOGE("Channel initialization failed %d", rc);
3882                 pthread_mutex_unlock(&mMutex);
3883                 goto error_exit;
3884             }
3885         }
3886 
3887         if (mRawDumpChannel) {
3888             rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3889             if (rc != NO_ERROR) {
3890                 LOGE("Error: Raw Dump Channel init failed");
3891                 pthread_mutex_unlock(&mMutex);
3892                 goto error_exit;
3893             }
3894         }
3895         if (mSupportChannel) {
3896             rc = mSupportChannel->initialize(IS_TYPE_NONE);
3897             if (rc < 0) {
3898                 LOGE("Support channel initialization failed");
3899                 pthread_mutex_unlock(&mMutex);
3900                 goto error_exit;
3901             }
3902         }
3903         if (mAnalysisChannel) {
3904             rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3905             if (rc < 0) {
3906                 LOGE("Analysis channel initialization failed");
3907                 pthread_mutex_unlock(&mMutex);
3908                 goto error_exit;
3909             }
3910         }
3911         if (mDummyBatchChannel) {
3912             rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3913             if (rc < 0) {
3914                 LOGE("mDummyBatchChannel setBatchSize failed");
3915                 pthread_mutex_unlock(&mMutex);
3916                 goto error_exit;
3917             }
3918             rc = mDummyBatchChannel->initialize(is_type);
3919             if (rc < 0) {
3920                 LOGE("mDummyBatchChannel initialization failed");
3921                 pthread_mutex_unlock(&mMutex);
3922                 goto error_exit;
3923             }
3924         }
3925 
3926         // Set bundle info
3927         rc = setBundleInfo();
3928         if (rc < 0) {
3929             LOGE("setBundleInfo failed %d", rc);
3930             pthread_mutex_unlock(&mMutex);
3931             goto error_exit;
3932         }
3933 
3934         //update settings from app here
3935         if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3936             mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
3937             LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
3938         }
3939         if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
3940             mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
3941             LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
3942         }
3943         if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
3944             mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
3945             LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
3946 
3947             if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
3948                 (mLinkedCameraId != mCameraId) ) {
3949                 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
3950                     mLinkedCameraId, mCameraId);
3951                 pthread_mutex_unlock(&mMutex);
3952                 goto error_exit;
3953             }
3954         }
3955 
3956         // add bundle related cameras
3957         LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
3958         if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3959             if (mIsDeviceLinked)
3960                 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
3961             else
3962                 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
3963 
3964             pthread_mutex_lock(&gCamLock);
3965 
3966             if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
3967                 LOGE("Dualcam: Invalid Session Id ");
3968                 pthread_mutex_unlock(&gCamLock);
3969                 pthread_mutex_unlock(&mMutex);
3970                 goto error_exit;
3971             }
3972 
3973             if (mIsMainCamera == 1) {
3974                 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
3975                 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
3976                 // related session id should be session id of linked session
3977                 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3978             } else {
3979                 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
3980                 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
3981                 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3982             }
3983             pthread_mutex_unlock(&gCamLock);
3984 
3985             rc = mCameraHandle->ops->sync_related_sensors(
3986                     mCameraHandle->camera_handle, m_pRelCamSyncBuf);
3987             if (rc < 0) {
3988                 LOGE("Dualcam: link failed");
3989                 pthread_mutex_unlock(&mMutex);
3990                 goto error_exit;
3991             }
3992         }
3993 
3994         //Then start them.
3995         LOGH("Start META Channel");
3996         rc = mMetadataChannel->start();
3997         if (rc < 0) {
3998             LOGE("META channel start failed");
3999             pthread_mutex_unlock(&mMutex);
4000             goto error_exit;
4001         }
4002 
4003         if (mAnalysisChannel) {
4004             rc = mAnalysisChannel->start();
4005             if (rc < 0) {
4006                 LOGE("Analysis channel start failed");
4007                 mMetadataChannel->stop();
4008                 pthread_mutex_unlock(&mMutex);
4009                 goto error_exit;
4010             }
4011         }
4012 
4013         if (mSupportChannel) {
4014             rc = mSupportChannel->start();
4015             if (rc < 0) {
4016                 LOGE("Support channel start failed");
4017                 mMetadataChannel->stop();
4018                 /* Although support and analysis are mutually exclusive today
4019                    adding it in anycase for future proofing */
4020                 if (mAnalysisChannel) {
4021                     mAnalysisChannel->stop();
4022                 }
4023                 pthread_mutex_unlock(&mMutex);
4024                 goto error_exit;
4025             }
4026         }
4027         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4028             it != mStreamInfo.end(); it++) {
4029             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4030             LOGH("Start Processing Channel mask=%d",
4031                      channel->getStreamTypeMask());
4032             rc = channel->start();
4033             if (rc < 0) {
4034                 LOGE("channel start failed");
4035                 pthread_mutex_unlock(&mMutex);
4036                 goto error_exit;
4037             }
4038         }
4039 
4040         if (mRawDumpChannel) {
4041             LOGD("Starting raw dump stream");
4042             rc = mRawDumpChannel->start();
4043             if (rc != NO_ERROR) {
4044                 LOGE("Error Starting Raw Dump Channel");
4045                 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4046                       it != mStreamInfo.end(); it++) {
4047                     QCamera3Channel *channel =
4048                         (QCamera3Channel *)(*it)->stream->priv;
4049                     LOGH("Stopping Processing Channel mask=%d",
4050                         channel->getStreamTypeMask());
4051                     channel->stop();
4052                 }
4053                 if (mSupportChannel)
4054                     mSupportChannel->stop();
4055                 if (mAnalysisChannel) {
4056                     mAnalysisChannel->stop();
4057                 }
4058                 mMetadataChannel->stop();
4059                 pthread_mutex_unlock(&mMutex);
4060                 goto error_exit;
4061             }
4062         }
4063 
4064         if (mChannelHandle) {
4065 
4066             rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4067                     mChannelHandle);
4068             if (rc != NO_ERROR) {
4069                 LOGE("start_channel failed %d", rc);
4070                 pthread_mutex_unlock(&mMutex);
4071                 goto error_exit;
4072             }
4073         }
4074 
4075         goto no_error;
4076 error_exit:
4077         m_perfLock.lock_rel();
4078         return rc;
4079 no_error:
4080         m_perfLock.lock_rel();
4081 
4082         mWokenUpByDaemon = false;
4083         mPendingLiveRequest = 0;
4084         mFirstConfiguration = false;
4085         enablePowerHint();
4086     }
4087 
4088     uint32_t frameNumber = request->frame_number;
4089     cam_stream_ID_t streamsArray;
4090 
4091     if (mFlushPerf) {
4092         //we cannot accept any requests during flush
4093         LOGE("process_capture_request cannot proceed during flush");
4094         pthread_mutex_unlock(&mMutex);
4095         return NO_ERROR; //should return an error
4096     }
4097 
4098     if (meta.exists(ANDROID_REQUEST_ID)) {
4099         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4100         mCurrentRequestId = request_id;
4101         LOGD("Received request with id: %d", request_id);
4102     } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4103         LOGE("Unable to find request id field, \
4104                 & no previous id available");
4105         pthread_mutex_unlock(&mMutex);
4106         return NAME_NOT_FOUND;
4107     } else {
4108         LOGD("Re-using old request id");
4109         request_id = mCurrentRequestId;
4110     }
4111 
4112     LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4113                                     request->num_output_buffers,
4114                                     request->input_buffer,
4115                                     frameNumber);
4116     // Acquire all request buffers first
4117     streamsArray.num_streams = 0;
4118     int blob_request = 0;
4119     uint32_t snapshotStreamId = 0;
4120     for (size_t i = 0; i < request->num_output_buffers; i++) {
4121         const camera3_stream_buffer_t& output = request->output_buffers[i];
4122         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4123 
4124         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4125             //Call function to store local copy of jpeg data for encode params.
4126             blob_request = 1;
4127             snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4128         }
4129 
4130         if (output.acquire_fence != -1) {
4131            rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4132            close(output.acquire_fence);
4133            if (rc != OK) {
4134               LOGE("sync wait failed %d", rc);
4135               pthread_mutex_unlock(&mMutex);
4136               return rc;
4137            }
4138         }
4139 
4140         streamsArray.stream_request[streamsArray.num_streams++].streamID =
4141             channel->getStreamID(channel->getStreamTypeMask());
4142 
4143         if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4144             isVidBufRequested = true;
4145         }
4146     }
4147 
4148     if (blob_request) {
4149         KPI_ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
4150     }
4151     if (blob_request && mRawDumpChannel) {
4152         LOGD("Trigger Raw based on blob request if Raw dump is enabled");
4153         streamsArray.stream_request[streamsArray.num_streams].streamID =
4154             mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
4155         streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
4156     }
4157 
4158     if(request->input_buffer == NULL) {
4159         /* Parse the settings:
4160          * - For every request in NORMAL MODE
4161          * - For every request in HFR mode during preview only case
4162          * - For first request of every batch in HFR mode during video
4163          * recording. In batchmode the same settings except frame number is
4164          * repeated in each request of the batch.
4165          */
4166         if (!mBatchSize ||
4167            (mBatchSize && !isVidBufRequested) ||
4168            (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
4169             rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
4170             if (rc < 0) {
4171                 LOGE("fail to set frame parameters");
4172                 pthread_mutex_unlock(&mMutex);
4173                 return rc;
4174             }
4175         }
4176         /* For batchMode HFR, setFrameParameters is not called for every
4177          * request. But only frame number of the latest request is parsed.
4178          * Keep track of first and last frame numbers in a batch so that
4179          * metadata for the frame numbers of batch can be duplicated in
4180          * handleBatchMetadta */
4181         if (mBatchSize) {
4182             if (!mToBeQueuedVidBufs) {
4183                 //start of the batch
4184                 mFirstFrameNumberInBatch = request->frame_number;
4185             }
4186             if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4187                 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4188                 LOGE("Failed to set the frame number in the parameters");
4189                 pthread_mutex_unlock(&mMutex);
4190                 return BAD_VALUE;
4191             }
4192         }
4193         if (mNeedSensorRestart) {
4194             /* Unlock the mutex as restartSensor waits on the channels to be
4195              * stopped, which in turn calls stream callback functions -
4196              * handleBufferWithLock and handleMetadataWithLock */
4197             pthread_mutex_unlock(&mMutex);
4198             rc = dynamicUpdateMetaStreamInfo();
4199             if (rc != NO_ERROR) {
4200                 LOGE("Restarting the sensor failed");
4201                 return BAD_VALUE;
4202             }
4203             mNeedSensorRestart = false;
4204             pthread_mutex_lock(&mMutex);
4205         }
4206     } else {
4207 
4208         if (request->input_buffer->acquire_fence != -1) {
4209            rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
4210            close(request->input_buffer->acquire_fence);
4211            if (rc != OK) {
4212               LOGE("input buffer sync wait failed %d", rc);
4213               pthread_mutex_unlock(&mMutex);
4214               return rc;
4215            }
4216         }
4217     }
4218 
4219     if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
4220         mLastCustIntentFrmNum = frameNumber;
4221     }
4222     /* Update pending request list and pending buffers map */
4223     PendingRequestInfo pendingRequest;
4224     pendingRequestIterator latestRequest;
4225     pendingRequest.frame_number = frameNumber;
4226     pendingRequest.num_buffers = request->num_output_buffers;
4227     pendingRequest.request_id = request_id;
4228     pendingRequest.blob_request = blob_request;
4229     pendingRequest.timestamp = 0;
4230     pendingRequest.bUrgentReceived = 0;
4231     if (request->input_buffer) {
4232         pendingRequest.input_buffer =
4233                 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4234         *(pendingRequest.input_buffer) = *(request->input_buffer);
4235         pInputBuffer = pendingRequest.input_buffer;
4236     } else {
4237        pendingRequest.input_buffer = NULL;
4238        pInputBuffer = NULL;
4239     }
4240 
4241     pendingRequest.pipeline_depth = 0;
4242     pendingRequest.partial_result_cnt = 0;
4243     extractJpegMetadata(mCurJpegMeta, request);
4244     pendingRequest.jpegMetadata = mCurJpegMeta;
4245     pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
4246     pendingRequest.shutter_notified = false;
4247 
4248     //extract capture intent
4249     if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4250         mCaptureIntent =
4251                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4252     }
4253     if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
4254         mHybridAeEnable =
4255                 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
4256     }
4257     pendingRequest.capture_intent = mCaptureIntent;
4258     pendingRequest.hybrid_ae_enable = mHybridAeEnable;
4259     /* DevCamDebug metadata processCaptureRequest */
4260     if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
4261         mDevCamDebugMetaEnable =
4262                 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
4263     }
4264     pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
4265     /* DevCamDebug metadata end */
4266 
4267     //extract CAC info
4268     if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4269         mCacMode =
4270                 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4271     }
4272     pendingRequest.fwkCacMode = mCacMode;
4273 
4274     PendingBuffersInRequest bufsForCurRequest;
4275     bufsForCurRequest.frame_number = frameNumber;
4276     // Mark current timestamp for the new request
4277     bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4278 
4279     for (size_t i = 0; i < request->num_output_buffers; i++) {
4280         RequestedBufferInfo requestedBuf;
4281         memset(&requestedBuf, 0, sizeof(requestedBuf));
4282         requestedBuf.stream = request->output_buffers[i].stream;
4283         requestedBuf.buffer = NULL;
4284         pendingRequest.buffers.push_back(requestedBuf);
4285 
4286         // Add to buffer handle the pending buffers list
4287         PendingBufferInfo bufferInfo;
4288         bufferInfo.buffer = request->output_buffers[i].buffer;
4289         bufferInfo.stream = request->output_buffers[i].stream;
4290         bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4291         QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4292         LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4293             frameNumber, bufferInfo.buffer,
4294             channel->getStreamTypeMask(), bufferInfo.stream->format);
4295     }
4296     // Add this request packet into mPendingBuffersMap
4297     mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4298     LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4299         mPendingBuffersMap.get_num_overall_buffers());
4300 
4301     latestRequest = mPendingRequestsList.insert(
4302             mPendingRequestsList.end(), pendingRequest);
4303     if(mFlush) {
4304         LOGI("mFlush is true");
4305         pthread_mutex_unlock(&mMutex);
4306         return NO_ERROR;
4307     }
4308 
4309     int indexUsed;
4310     // Notify metadata channel we receive a request
4311     mMetadataChannel->request(NULL, frameNumber, indexUsed);
4312 
4313     if(request->input_buffer != NULL){
4314         LOGD("Input request, frame_number %d", frameNumber);
4315         rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4316         if (NO_ERROR != rc) {
4317             LOGE("fail to set reproc parameters");
4318             pthread_mutex_unlock(&mMutex);
4319             return rc;
4320         }
4321     }
4322 
4323     // Call request on other streams
4324     uint32_t streams_need_metadata = 0;
4325     pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4326     for (size_t i = 0; i < request->num_output_buffers; i++) {
4327         const camera3_stream_buffer_t& output = request->output_buffers[i];
4328         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4329 
4330         if (channel == NULL) {
4331             LOGW("invalid channel pointer for stream");
4332             continue;
4333         }
4334 
4335         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4336             LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4337                       output.buffer, request->input_buffer, frameNumber);
4338             if(request->input_buffer != NULL){
4339                 rc = channel->request(output.buffer, frameNumber,
4340                         pInputBuffer, &mReprocMeta, indexUsed);
4341                 if (rc < 0) {
4342                     LOGE("Fail to request on picture channel");
4343                     pthread_mutex_unlock(&mMutex);
4344                     return rc;
4345                 }
4346             } else {
4347                 LOGD("snapshot request with buffer %p, frame_number %d",
4348                          output.buffer, frameNumber);
4349                 if (!request->settings) {
4350                     rc = channel->request(output.buffer, frameNumber,
4351                             NULL, mPrevParameters, indexUsed);
4352                 } else {
4353                     rc = channel->request(output.buffer, frameNumber,
4354                             NULL, mParameters, indexUsed);
4355                 }
4356                 if (rc < 0) {
4357                     LOGE("Fail to request on picture channel");
4358                     pthread_mutex_unlock(&mMutex);
4359                     return rc;
4360                 }
4361 
4362                 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4363                 uint32_t j = 0;
4364                 for (j = 0; j < streamsArray.num_streams; j++) {
4365                     if (streamsArray.stream_request[j].streamID == streamId) {
4366                       if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4367                           streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4368                       else
4369                           streamsArray.stream_request[j].buf_index = indexUsed;
4370                         break;
4371                     }
4372                 }
4373                 if (j == streamsArray.num_streams) {
4374                     LOGE("Did not find matching stream to update index");
4375                     assert(0);
4376                 }
4377 
4378                 pendingBufferIter->need_metadata = true;
4379                 streams_need_metadata++;
4380             }
4381         } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4382             bool needMetadata = false;
4383             QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4384             rc = yuvChannel->request(output.buffer, frameNumber,
4385                     pInputBuffer,
4386                     (pInputBuffer ? &mReprocMeta : mParameters), needMetadata, indexUsed);
4387             if (rc < 0) {
4388                 LOGE("Fail to request on YUV channel");
4389                 pthread_mutex_unlock(&mMutex);
4390                 return rc;
4391             }
4392 
4393             uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4394             uint32_t j = 0;
4395             for (j = 0; j < streamsArray.num_streams; j++) {
4396                 if (streamsArray.stream_request[j].streamID == streamId) {
4397                     if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4398                         streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4399                     else
4400                         streamsArray.stream_request[j].buf_index = indexUsed;
4401                     break;
4402                 }
4403             }
4404             if (j == streamsArray.num_streams) {
4405                 LOGE("Did not find matching stream to update index");
4406                 assert(0);
4407             }
4408 
4409             pendingBufferIter->need_metadata = needMetadata;
4410             if (needMetadata)
4411                 streams_need_metadata += 1;
4412             LOGD("calling YUV channel request, need_metadata is %d",
4413                      needMetadata);
4414         } else {
4415             LOGD("request with buffer %p, frame_number %d",
4416                   output.buffer, frameNumber);
4417 
4418             rc = channel->request(output.buffer, frameNumber, indexUsed);
4419 
4420             uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4421             uint32_t j = 0;
4422             for (j = 0; j < streamsArray.num_streams; j++) {
4423                 if (streamsArray.stream_request[j].streamID == streamId) {
4424                     if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4425                         streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4426                     else
4427                         streamsArray.stream_request[j].buf_index = indexUsed;
4428                     break;
4429                 }
4430             }
4431             if (j == streamsArray.num_streams) {
4432                 LOGE("Did not find matching stream to update index");
4433                 assert(0);
4434             }
4435 
4436             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4437                     && mBatchSize) {
4438                 mToBeQueuedVidBufs++;
4439                 if (mToBeQueuedVidBufs == mBatchSize) {
4440                     channel->queueBatchBuf();
4441                 }
4442             }
4443             if (rc < 0) {
4444                 LOGE("request failed");
4445                 pthread_mutex_unlock(&mMutex);
4446                 return rc;
4447             }
4448         }
4449         pendingBufferIter++;
4450     }
4451 
4452     //If 2 streams have need_metadata set to true, fail the request, unless
4453     //we copy/reference count the metadata buffer
4454     if (streams_need_metadata > 1) {
4455         LOGE("not supporting request in which two streams requires"
4456                 " 2 HAL metadata for reprocessing");
4457         pthread_mutex_unlock(&mMutex);
4458         return -EINVAL;
4459     }
4460 
4461     if (request->input_buffer == NULL) {
4462         /* Set the parameters to backend:
4463          * - For every request in NORMAL MODE
4464          * - For every request in HFR mode during preview only case
4465          * - Once every batch in HFR mode during video recording
4466          */
4467         if (!mBatchSize ||
4468            (mBatchSize && !isVidBufRequested) ||
4469            (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4470             LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4471                      mBatchSize, isVidBufRequested,
4472                     mToBeQueuedVidBufs);
4473 
4474             if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
4475                 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4476                     uint32_t m = 0;
4477                     for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4478                         if (streamsArray.stream_request[k].streamID ==
4479                                 mBatchedStreamsArray.stream_request[m].streamID)
4480                             break;
4481                         }
4482                         if (m == mBatchedStreamsArray.num_streams) {
4483                             mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
4484                                 streamsArray.stream_request[k].streamID;
4485                             mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
4486                                 streamsArray.stream_request[k].buf_index;
4487                             mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4488                         }
4489                 }
4490                 streamsArray = mBatchedStreamsArray;
4491             }
4492             /* Update stream id of all the requested buffers */
4493             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
4494                 LOGE("Failed to set stream type mask in the parameters");
4495                 pthread_mutex_unlock(&mMutex);
4496                 return BAD_VALUE;
4497             }
4498 
4499             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4500                     mParameters);
4501             if (rc < 0) {
4502                 LOGE("set_parms failed");
4503             }
4504             /* reset to zero coz, the batch is queued */
4505             mToBeQueuedVidBufs = 0;
4506             mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
4507             memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
4508         } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
4509             for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4510                 uint32_t m = 0;
4511                 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4512                     if (streamsArray.stream_request[k].streamID ==
4513                             mBatchedStreamsArray.stream_request[m].streamID)
4514                         break;
4515                 }
4516                 if (m == mBatchedStreamsArray.num_streams) {
4517                     mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
4518                         streamsArray.stream_request[k].streamID;
4519                     mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
4520                         streamsArray.stream_request[k].buf_index;
4521                     mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4522                 }
4523             }
4524         }
4525         mPendingLiveRequest++;
4526     }
4527 
4528     LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
4529 
4530     mState = STARTED;
4531     // Added a timed condition wait
4532     struct timespec ts;
4533     uint8_t isValidTimeout = 1;
4534     rc = clock_gettime(CLOCK_MONOTONIC, &ts);
4535     if (rc < 0) {
4536       isValidTimeout = 0;
4537       LOGE("Error reading the real time clock!!");
4538     }
4539     else {
4540       // Make timeout as 5 sec for request to be honored
4541       ts.tv_sec += 5;
4542     }
4543     //Block on conditional variable
4544     while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
4545             (mState != ERROR) && (mState != DEINIT)) {
4546         if (!isValidTimeout) {
4547             LOGD("Blocking on conditional wait");
4548             pthread_cond_wait(&mRequestCond, &mMutex);
4549         }
4550         else {
4551             LOGD("Blocking on timed conditional wait");
4552             rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
4553             if (rc == ETIMEDOUT) {
4554                 rc = -ENODEV;
4555                 LOGE("Unblocked on timeout!!!!");
4556                 break;
4557             }
4558         }
4559         LOGD("Unblocked");
4560         if (mWokenUpByDaemon) {
4561             mWokenUpByDaemon = false;
4562             if (mPendingLiveRequest < mMaxInFlightRequests)
4563                 break;
4564         }
4565     }
4566     pthread_mutex_unlock(&mMutex);
4567 
4568     return rc;
4569 }
4570 
4571 /*===========================================================================
4572  * FUNCTION   : dump
4573  *
4574  * DESCRIPTION:
4575  *
4576  * PARAMETERS :
4577  *
4578  *
4579  * RETURN     :
4580  *==========================================================================*/
dump(int fd)4581 void QCamera3HardwareInterface::dump(int fd)
4582 {
4583     pthread_mutex_lock(&mMutex);
4584     dprintf(fd, "\n Camera HAL3 information Begin \n");
4585 
4586     dprintf(fd, "\nNumber of pending requests: %zu \n",
4587         mPendingRequestsList.size());
4588     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4589     dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
4590     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4591     for(pendingRequestIterator i = mPendingRequestsList.begin();
4592             i != mPendingRequestsList.end(); i++) {
4593         dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
4594         i->frame_number, i->num_buffers, i->request_id, i->blob_request,
4595         i->input_buffer);
4596     }
4597     dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
4598                 mPendingBuffersMap.get_num_overall_buffers());
4599     dprintf(fd, "-------+------------------\n");
4600     dprintf(fd, " Frame | Stream type mask \n");
4601     dprintf(fd, "-------+------------------\n");
4602     for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4603         for(auto &j : req.mPendingBufferList) {
4604             QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
4605             dprintf(fd, " %5d | %11d \n",
4606                     req.frame_number, channel->getStreamTypeMask());
4607         }
4608     }
4609     dprintf(fd, "-------+------------------\n");
4610 
4611     dprintf(fd, "\nPending frame drop list: %zu\n",
4612         mPendingFrameDropList.size());
4613     dprintf(fd, "-------+-----------\n");
4614     dprintf(fd, " Frame | Stream ID \n");
4615     dprintf(fd, "-------+-----------\n");
4616     for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
4617         i != mPendingFrameDropList.end(); i++) {
4618         dprintf(fd, " %5d | %9d \n",
4619             i->frame_number, i->stream_ID);
4620     }
4621     dprintf(fd, "-------+-----------\n");
4622 
4623     dprintf(fd, "\n Camera HAL3 information End \n");
4624 
4625     /* use dumpsys media.camera as trigger to send update debug level event */
4626     mUpdateDebugLevel = true;
4627     pthread_mutex_unlock(&mMutex);
4628     return;
4629 }
4630 
4631 /*===========================================================================
4632  * FUNCTION   : flush
4633  *
4634  * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
4635  *              conditionally restarts channels
4636  *
4637  * PARAMETERS :
4638  *  @ restartChannels: re-start all channels
4639  *
4640  *
4641  * RETURN     :
4642  *          0 on success
4643  *          Error code on failure
4644  *==========================================================================*/
flush(bool restartChannels)4645 int QCamera3HardwareInterface::flush(bool restartChannels)
4646 {
4647     KPI_ATRACE_CALL();
4648     int32_t rc = NO_ERROR;
4649 
4650     LOGD("Unblocking Process Capture Request");
4651     pthread_mutex_lock(&mMutex);
4652     mFlush = true;
4653     pthread_mutex_unlock(&mMutex);
4654 
4655     rc = stopAllChannels();
4656     // unlink of dualcam
4657     if (mIsDeviceLinked) {
4658         m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4659         pthread_mutex_lock(&gCamLock);
4660 
4661         if (mIsMainCamera == 1) {
4662             m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4663             m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
4664             // related session id should be session id of linked session
4665             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4666         } else {
4667             m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4668             m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
4669             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4670         }
4671         pthread_mutex_unlock(&gCamLock);
4672 
4673         rc = mCameraHandle->ops->sync_related_sensors(
4674                 mCameraHandle->camera_handle, m_pRelCamSyncBuf);
4675         if (rc < 0) {
4676             LOGE("Dualcam: Unlink failed, but still proceed to close");
4677         }
4678     }
4679 
4680     if (rc < 0) {
4681         LOGE("stopAllChannels failed");
4682         return rc;
4683     }
4684     if (mChannelHandle) {
4685         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
4686                 mChannelHandle);
4687     }
4688 
4689     // Reset bundle info
4690     rc = setBundleInfo();
4691     if (rc < 0) {
4692         LOGE("setBundleInfo failed %d", rc);
4693         return rc;
4694     }
4695 
4696     // Mutex Lock
4697     pthread_mutex_lock(&mMutex);
4698 
4699     // Unblock process_capture_request
4700     mPendingLiveRequest = 0;
4701     pthread_cond_signal(&mRequestCond);
4702 
4703     rc = notifyErrorForPendingRequests();
4704     if (rc < 0) {
4705         LOGE("notifyErrorForPendingRequests failed");
4706         pthread_mutex_unlock(&mMutex);
4707         return rc;
4708     }
4709 
4710     mFlush = false;
4711 
4712     // Start the Streams/Channels
4713     if (restartChannels) {
4714         rc = startAllChannels();
4715         if (rc < 0) {
4716             LOGE("startAllChannels failed");
4717             pthread_mutex_unlock(&mMutex);
4718             return rc;
4719         }
4720     }
4721 
4722     if (mChannelHandle) {
4723         mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4724                     mChannelHandle);
4725         if (rc < 0) {
4726             LOGE("start_channel failed");
4727             pthread_mutex_unlock(&mMutex);
4728             return rc;
4729         }
4730     }
4731 
4732     pthread_mutex_unlock(&mMutex);
4733 
4734     return 0;
4735 }
4736 
4737 /*===========================================================================
4738  * FUNCTION   : flushPerf
4739  *
4740  * DESCRIPTION: This is the performance optimization version of flush that does
4741  *              not use stream off, rather flushes the system
4742  *
4743  * PARAMETERS :
4744  *
4745  *
4746  * RETURN     : 0 : success
4747  *              -EINVAL: input is malformed (device is not valid)
4748  *              -ENODEV: if the device has encountered a serious error
4749  *==========================================================================*/
flushPerf()4750 int QCamera3HardwareInterface::flushPerf()
4751 {
4752     ATRACE_CALL();
4753     int32_t rc = 0;
4754     struct timespec timeout;
4755     bool timed_wait = false;
4756 
4757     pthread_mutex_lock(&mMutex);
4758     mFlushPerf = true;
4759     mPendingBuffersMap.numPendingBufsAtFlush =
4760         mPendingBuffersMap.get_num_overall_buffers();
4761     LOGD("Calling flush. Wait for %d buffers to return",
4762         mPendingBuffersMap.numPendingBufsAtFlush);
4763 
4764     /* send the flush event to the backend */
4765     rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
4766     if (rc < 0) {
4767         LOGE("Error in flush: IOCTL failure");
4768         mFlushPerf = false;
4769         pthread_mutex_unlock(&mMutex);
4770         return -ENODEV;
4771     }
4772 
4773     if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
4774         LOGD("No pending buffers in HAL, return flush");
4775         mFlushPerf = false;
4776         pthread_mutex_unlock(&mMutex);
4777         return rc;
4778     }
4779 
4780     /* wait on a signal that buffers were received */
4781     rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
4782     if (rc < 0) {
4783         LOGE("Error reading the real time clock, cannot use timed wait");
4784     } else {
4785         timeout.tv_sec += FLUSH_TIMEOUT;
4786         timed_wait = true;
4787     }
4788 
4789     //Block on conditional variable
4790     while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
4791         LOGD("Waiting on mBuffersCond");
4792         if (!timed_wait) {
4793             rc = pthread_cond_wait(&mBuffersCond, &mMutex);
4794             if (rc != 0) {
4795                  LOGE("pthread_cond_wait failed due to rc = %s",
4796                         strerror(rc));
4797                  break;
4798             }
4799         } else {
4800             rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
4801             if (rc != 0) {
4802                 LOGE("pthread_cond_timedwait failed due to rc = %s",
4803                             strerror(rc));
4804                 break;
4805             }
4806         }
4807     }
4808     if (rc != 0) {
4809         mFlushPerf = false;
4810         pthread_mutex_unlock(&mMutex);
4811         return -ENODEV;
4812     }
4813 
4814     LOGD("Received buffers, now safe to return them");
4815 
4816     //make sure the channels handle flush
4817     //currently only required for the picture channel to release snapshot resources
4818     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4819             it != mStreamInfo.end(); it++) {
4820         QCamera3Channel *channel = (*it)->channel;
4821         if (channel) {
4822             rc = channel->flush();
4823             if (rc) {
4824                LOGE("Flushing the channels failed with error %d", rc);
4825                // even though the channel flush failed we need to continue and
4826                // return the buffers we have to the framework, however the return
4827                // value will be an error
4828                rc = -ENODEV;
4829             }
4830         }
4831     }
4832 
4833     /* notify the frameworks and send errored results */
4834     rc = notifyErrorForPendingRequests();
4835     if (rc < 0) {
4836         LOGE("notifyErrorForPendingRequests failed");
4837         pthread_mutex_unlock(&mMutex);
4838         return rc;
4839     }
4840 
4841     //unblock process_capture_request
4842     mPendingLiveRequest = 0;
4843     unblockRequestIfNecessary();
4844 
4845     mFlushPerf = false;
4846     pthread_mutex_unlock(&mMutex);
4847     LOGD ("Flush Operation complete. rc = %d", rc);
4848     return rc;
4849 }
4850 
4851 /*===========================================================================
4852  * FUNCTION   : handleCameraDeviceError
4853  *
4854  * DESCRIPTION: This function calls internal flush and notifies the error to
4855  *              framework and updates the state variable.
4856  *
4857  * PARAMETERS : None
4858  *
4859  * RETURN     : NO_ERROR on Success
4860  *              Error code on failure
4861  *==========================================================================*/
handleCameraDeviceError()4862 int32_t QCamera3HardwareInterface::handleCameraDeviceError()
4863 {
4864     int32_t rc = NO_ERROR;
4865 
4866     pthread_mutex_lock(&mMutex);
4867     if (mState != ERROR) {
4868         //if mState != ERROR, nothing to be done
4869         pthread_mutex_unlock(&mMutex);
4870         return NO_ERROR;
4871     }
4872     pthread_mutex_unlock(&mMutex);
4873 
4874     rc = flush(false /* restart channels */);
4875     if (NO_ERROR != rc) {
4876         LOGE("internal flush to handle mState = ERROR failed");
4877     }
4878 
4879     pthread_mutex_lock(&mMutex);
4880     mState = DEINIT;
4881     pthread_mutex_unlock(&mMutex);
4882 
4883     camera3_notify_msg_t notify_msg;
4884     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
4885     notify_msg.type = CAMERA3_MSG_ERROR;
4886     notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
4887     notify_msg.message.error.error_stream = NULL;
4888     notify_msg.message.error.frame_number = 0;
4889     mCallbackOps->notify(mCallbackOps, &notify_msg);
4890 
4891     return rc;
4892 }
4893 
4894 /*===========================================================================
4895  * FUNCTION   : captureResultCb
4896  *
4897  * DESCRIPTION: Callback handler for all capture result
4898  *              (streams, as well as metadata)
4899  *
4900  * PARAMETERS :
4901  *   @metadata : metadata information
4902  *   @buffer   : actual gralloc buffer to be returned to frameworks.
4903  *               NULL if metadata.
4904  *
4905  * RETURN     : NONE
4906  *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata_buf,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer)4907 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4908                 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4909 {
4910     if (metadata_buf) {
4911         pthread_mutex_lock(&mMutex);
4912         uint8_t batchSize = mBatchSize;
4913         pthread_mutex_unlock(&mMutex);
4914         if (batchSize) {
4915             handleBatchMetadata(metadata_buf,
4916                     true /* free_and_bufdone_meta_buf */);
4917         } else { /* mBatchSize = 0 */
4918             hdrPlusPerfLock(metadata_buf);
4919             pthread_mutex_lock(&mMutex);
4920             handleMetadataWithLock(metadata_buf,
4921                     true /* free_and_bufdone_meta_buf */,
4922                     true /* last urgent frame of batch metadata */,
4923                     true /* last frame of batch metadata */ );
4924             pthread_mutex_unlock(&mMutex);
4925         }
4926     } else if (isInputBuffer) {
4927         pthread_mutex_lock(&mMutex);
4928         handleInputBufferWithLock(frame_number);
4929         pthread_mutex_unlock(&mMutex);
4930     } else {
4931         pthread_mutex_lock(&mMutex);
4932         handleBufferWithLock(buffer, frame_number);
4933         pthread_mutex_unlock(&mMutex);
4934     }
4935     return;
4936 }
4937 
4938 /*===========================================================================
4939  * FUNCTION   : getReprocessibleOutputStreamId
4940  *
4941  * DESCRIPTION: Get source output stream id for the input reprocess stream
4942  *              based on size and format, which would be the largest
4943  *              output stream if an input stream exists.
4944  *
4945  * PARAMETERS :
4946  *   @id      : return the stream id if found
4947  *
4948  * RETURN     : int32_t type of status
4949  *              NO_ERROR  -- success
 *              non-zero failure code
4951  *==========================================================================*/
getReprocessibleOutputStreamId(uint32_t & id)4952 int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4953 {
4954     /* check if any output or bidirectional stream with the same size and format
4955        and return that stream */
4956     if ((mInputStreamInfo.dim.width > 0) &&
4957             (mInputStreamInfo.dim.height > 0)) {
4958         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4959                 it != mStreamInfo.end(); it++) {
4960 
4961             camera3_stream_t *stream = (*it)->stream;
4962             if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4963                     (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4964                     (stream->format == mInputStreamInfo.format)) {
4965                 // Usage flag for an input stream and the source output stream
4966                 // may be different.
4967                 LOGD("Found reprocessible output stream! %p", *it);
4968                 LOGD("input stream usage 0x%x, current stream usage 0x%x",
4969                          stream->usage, mInputStreamInfo.usage);
4970 
4971                 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4972                 if (channel != NULL && channel->mStreams[0]) {
4973                     id = channel->mStreams[0]->getMyServerID();
4974                     return NO_ERROR;
4975                 }
4976             }
4977         }
4978     } else {
4979         LOGD("No input stream, so no reprocessible output stream");
4980     }
4981     return NAME_NOT_FOUND;
4982 }
4983 
4984 /*===========================================================================
4985  * FUNCTION   : lookupFwkName
4986  *
4987  * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
4989  *
4990  * PARAMETERS  :
4991  *   @arr      : map between the two enums
4992  *   @len      : len of the map
4993  *   @hal_name : name of the hal_parm to map
4994  *
4995  * RETURN     : int type of status
4996  *              fwk_name  -- success
 *              non-zero failure code
4998  *==========================================================================*/
lookupFwkName(const mapType * arr,size_t len,halType hal_name)4999 template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
5000         size_t len, halType hal_name)
5001 {
5002 
5003     for (size_t i = 0; i < len; i++) {
5004         if (arr[i].hal_name == hal_name) {
5005             return arr[i].fwk_name;
5006         }
5007     }
5008 
5009     /* Not able to find matching framework type is not necessarily
5010      * an error case. This happens when mm-camera supports more attributes
5011      * than the frameworks do */
5012     LOGH("Cannot find matching framework type");
5013     return NAME_NOT_FOUND;
5014 }
5015 
5016 /*===========================================================================
5017  * FUNCTION   : lookupHalName
5018  *
5019  * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
5021  *
5022  * PARAMETERS  :
5023  *   @arr      : map between the two enums
5024  *   @len      : len of the map
 *   @fwk_name : name of the fwk_parm to map to its HAL counterpart
5026  *
5027  * RETURN     : int32_t type of status
5028  *              hal_name  -- success
 *              non-zero failure code
5030  *==========================================================================*/
lookupHalName(const mapType * arr,size_t len,fwkType fwk_name)5031 template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
5032         size_t len, fwkType fwk_name)
5033 {
5034     for (size_t i = 0; i < len; i++) {
5035         if (arr[i].fwk_name == fwk_name) {
5036             return arr[i].hal_name;
5037         }
5038     }
5039 
5040     LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
5041     return NAME_NOT_FOUND;
5042 }
5043 
5044 /*===========================================================================
5045  * FUNCTION   : lookupProp
5046  *
5047  * DESCRIPTION: lookup a value by its name
5048  *
5049  * PARAMETERS :
5050  *   @arr     : map between the two enums
5051  *   @len     : size of the map
5052  *   @name    : name to be looked up
5053  *
5054  * RETURN     : Value if found
5055  *              CAM_CDS_MODE_MAX if not found
5056  *==========================================================================*/
lookupProp(const mapType * arr,size_t len,const char * name)5057 template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
5058         size_t len, const char *name)
5059 {
5060     if (name) {
5061         for (size_t i = 0; i < len; i++) {
5062             if (!strcmp(arr[i].desc, name)) {
5063                 return arr[i].val;
5064             }
5065         }
5066     }
5067     return CAM_CDS_MODE_MAX;
5068 }
5069 
5070 /*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate metadata received from the HAL/backend into the
 *              camera_metadata_t format expected by the framework
5073  *
5074  * PARAMETERS :
5075  *   @metadata : metadata information from callback
5076  *   @timestamp: metadata buffer timestamp
5077  *   @request_id: request id
5078  *   @jpegMetadata: additional jpeg metadata
5079  *   @hybrid_ae_enable: whether hybrid ae is enabled
5080  *   // DevCamDebug metadata
5081  *   @DevCamDebug_meta_enable: enable DevCamDebug meta
5082  *   // DevCamDebug metadata end
5083  *   @pprocDone: whether internal offline postprocsesing is done
5084  *   @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
5085  *                         in a batch. Always true for non-batch mode.
5086  *
5087  * RETURN     : camera_metadata_t*
5088  *              metadata in a format specified by fwk
5089  *==========================================================================*/
5090 camera_metadata_t*
translateFromHalMetadata(metadata_buffer_t * metadata,nsecs_t timestamp,int32_t request_id,const CameraMetadata & jpegMetadata,uint8_t pipeline_depth,uint8_t capture_intent,uint8_t hybrid_ae_enable,uint8_t DevCamDebug_meta_enable,bool pprocDone,uint8_t fwk_cacMode,bool lastMetadataInBatch)5091 QCamera3HardwareInterface::translateFromHalMetadata(
5092                                  metadata_buffer_t *metadata,
5093                                  nsecs_t timestamp,
5094                                  int32_t request_id,
5095                                  const CameraMetadata& jpegMetadata,
5096                                  uint8_t pipeline_depth,
5097                                  uint8_t capture_intent,
5098                                  uint8_t hybrid_ae_enable,
5099                                  /* DevCamDebug metadata translateFromHalMetadata argument */
5100                                  uint8_t DevCamDebug_meta_enable,
5101                                  /* DevCamDebug metadata end */
5102                                  bool pprocDone,
5103                                  uint8_t fwk_cacMode,
5104                                  bool lastMetadataInBatch)
5105 {
5106     CameraMetadata camMetadata;
5107     camera_metadata_t *resultMetadata;
5108 
5109     if (!lastMetadataInBatch) {
5110         /* In batch mode, use empty metadata if this is not the last in batch*/
5111         resultMetadata = allocate_camera_metadata(0, 0);
5112         return resultMetadata;
5113     }
5114 
5115     if (jpegMetadata.entryCount())
5116         camMetadata.append(jpegMetadata);
5117 
5118     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
5119     camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
5120     camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
5121     camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
5122     camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
5123     if (mBatchSize == 0) {
5124         // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
5125         camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
5126     }
5127 
5128     // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
5129     // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
5130     if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
5131         // DevCamDebug metadata translateFromHalMetadata AF
5132         IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
5133                 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
5134             int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
5135             camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
5136         }
5137         IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
5138                 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
5139             int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
5140             camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
5141         }
5142         IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
5143                 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
5144             int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
5145             camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
5146         }
5147         IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
5148                 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
5149             int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
5150             camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
5151         }
5152         IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
5153                 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
5154             int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
5155             camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
5156         }
5157         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
5158                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
5159             int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
5160                 *DevCamDebug_af_monitor_pdaf_target_pos;
5161             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
5162                 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
5163         }
5164         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
5165                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
5166             int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
5167                 *DevCamDebug_af_monitor_pdaf_confidence;
5168             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
5169                 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
5170         }
5171         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
5172                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
5173             int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
5174             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
5175                 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
5176         }
5177         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
5178                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
5179             int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
5180                 *DevCamDebug_af_monitor_tof_target_pos;
5181             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
5182                 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
5183         }
5184         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
5185                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
5186             int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
5187                 *DevCamDebug_af_monitor_tof_confidence;
5188             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
5189                 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
5190         }
5191         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
5192                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
5193             int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
5194             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
5195                 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
5196         }
5197         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
5198                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
5199             int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
5200             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
5201                 &fwk_DevCamDebug_af_monitor_type_select, 1);
5202         }
5203         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
5204                 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
5205             int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
5206             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
5207                 &fwk_DevCamDebug_af_monitor_refocus, 1);
5208         }
5209         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
5210                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
5211             int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
5212             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
5213                 &fwk_DevCamDebug_af_monitor_target_pos, 1);
5214         }
5215         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
5216                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
5217             int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
5218                 *DevCamDebug_af_search_pdaf_target_pos;
5219             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
5220                 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
5221         }
5222         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
5223                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
5224             int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
5225             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
5226                 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
5227         }
5228         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
5229                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
5230             int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
5231             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
5232                 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
5233         }
5234         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
5235                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
5236             int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
5237             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
5238                 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
5239         }
5240         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
5241                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
5242             int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
5243             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
5244                 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
5245         }
5246         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
5247                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
5248             int32_t fwk_DevCamDebug_af_search_tof_target_pos =
5249                 *DevCamDebug_af_search_tof_target_pos;
5250             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
5251                 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
5252         }
5253         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
5254                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
5255             int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
5256             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
5257                 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
5258         }
5259         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
5260                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
5261             int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
5262             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
5263                 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
5264         }
5265         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
5266                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
5267             int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
5268             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
5269                 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
5270         }
5271         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
5272                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
5273             int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
5274             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
5275                 &fwk_DevCamDebug_af_search_tof_confidence, 1);
5276         }
5277         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
5278                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
5279             int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
5280             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
5281                 &fwk_DevCamDebug_af_search_type_select, 1);
5282         }
5283         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
5284                 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
5285             int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
5286             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
5287                 &fwk_DevCamDebug_af_search_next_pos, 1);
5288         }
5289         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
5290                 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
5291             int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
5292             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
5293                 &fwk_DevCamDebug_af_search_target_pos, 1);
5294         }
5295         // DevCamDebug metadata translateFromHalMetadata AEC
5296         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
5297                 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
5298             int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
5299             camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
5300     }
5301         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
5302                 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
5303             int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
5304             camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
5305         }
5306         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
5307                 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
5308             int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
5309             camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
5310         }
5311         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
5312                 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
5313             int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
5314             camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
5315         }
5316         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
5317                 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
5318             int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
5319             camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
5320         }
5321         IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
5322                 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
5323             float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
5324             camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
5325         }
5326         IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
5327                 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
5328             int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
5329             camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
5330         }
5331         IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
5332                 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
5333             float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
5334             camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
5335         }
5336         // DevCamDebug metadata translateFromHalMetadata AWB
5337         IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
5338                 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
5339             float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
5340             camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
5341         }
5342         IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
5343                 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
5344             float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
5345             camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
5346         }
5347         IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
5348                 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
5349             float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
5350             camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
5351         }
5352         IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
5353                 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
5354             int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
5355             camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
5356         }
5357         IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
5358                 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
5359             int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
5360             camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
5361         }
5362     }
5363     // atrace_end(ATRACE_TAG_ALWAYS);
5364 
5365     IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
5366         int64_t fwk_frame_number = *frame_number;
5367         camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
5368     }
5369 
5370     IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
5371         int32_t fps_range[2];
5372         fps_range[0] = (int32_t)float_range->min_fps;
5373         fps_range[1] = (int32_t)float_range->max_fps;
5374         camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
5375                                       fps_range, 2);
5376         LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
5377              fps_range[0], fps_range[1]);
5378     }
5379 
5380     IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
5381         camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
5382     }
5383 
5384     IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5385         int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
5386                 METADATA_MAP_SIZE(SCENE_MODES_MAP),
5387                 *sceneMode);
5388         if (NAME_NOT_FOUND != val) {
5389             uint8_t fwkSceneMode = (uint8_t)val;
5390             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
5391             LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
5392                      fwkSceneMode);
5393         }
5394     }
5395 
5396     IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
5397         uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
5398         camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
5399     }
5400 
5401     IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
5402         uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
5403         camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
5404     }
5405 
5406     IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
5407         uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
5408         camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
5409     }
5410 
5411     IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
5412             CAM_INTF_META_EDGE_MODE, metadata) {
5413         camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
5414     }
5415 
5416     IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
5417         uint8_t fwk_flashPower = (uint8_t) *flashPower;
5418         camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
5419     }
5420 
5421     IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
5422         camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
5423     }
5424 
5425     IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
5426         if (0 <= *flashState) {
5427             uint8_t fwk_flashState = (uint8_t) *flashState;
5428             if (!gCamCapability[mCameraId]->flash_available) {
5429                 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
5430             }
5431             camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
5432         }
5433     }
5434 
5435     IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
5436         int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
5437         if (NAME_NOT_FOUND != val) {
5438             uint8_t fwk_flashMode = (uint8_t)val;
5439             camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
5440         }
5441     }
5442 
5443     IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
5444         uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
5445         camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
5446     }
5447 
5448     IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
5449         camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
5450     }
5451 
    // Lens filter density reported by the HAL, forwarded unchanged.
    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
    }

    // Current lens focal length, forwarded unchanged.
    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
    }

    // Optical stabilization mode: HAL reports uint32_t, the framework tag is a
    // uint8_t enum, hence the narrowing cast.
    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
    }

    // Video (electronic) stabilization mode. The else arm fires when the HAL
    // did not post this entry for the frame.
    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
        uint8_t fwk_videoStab = (uint8_t) *videoStab;
        LOGD("fwk_videoStab = %d", fwk_videoStab);
        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
    } else {
        // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
        // and so hardcoding the Video Stab result to OFF mode.
        uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
        LOGD("%s: EIS result default to OFF mode", __func__);
    }

    // Noise reduction mode applied for this frame.
    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
    }

    // Effective exposure factor for reprocess requests.
    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
    }

    // Dynamic black level: reorder the HAL's per-channel black levels into RGGB
    // order for the vendor tag, then rescale the same buffer for the Android tag.
    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
        float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];

        // Reorder according to the sensor's color filter arrangement.
        adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
              gCamCapability[mCameraId]->color_arrangement);

        LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
          blackLevelAppliedPattern->cam_black_level[0],
          blackLevelAppliedPattern->cam_black_level[1],
          blackLevelAppliedPattern->cam_black_level[2],
          blackLevelAppliedPattern->cam_black_level[3]);
        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
                BLACK_LEVEL_PATTERN_CNT);

        // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
        // Need convert the internal 12 bit depth to sensor 10 bit sensor raw
        // depth space.
        fwk_blackLevelInd[0] /= 4.0;
        fwk_blackLevelInd[1] /= 4.0;
        fwk_blackLevelInd[2] /= 4.0;
        fwk_blackLevelInd[3] /= 4.0;
        camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
                BLACK_LEVEL_PATTERN_CNT);
    }

    // Fixed whitelevel is used by ISP/Sensor
    camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
            &gCamCapability[mCameraId]->white_level, 1);
5515 
    // Scaler (zoom) crop region, remapped to the active-array coordinate
    // system before being published to the framework.
    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
        int32_t scalerCropRegion[4];
        scalerCropRegion[0] = hScalerCropRegion->left;
        scalerCropRegion[1] = hScalerCropRegion->top;
        scalerCropRegion[2] = hScalerCropRegion->width;
        scalerCropRegion[3] = hScalerCropRegion->height;

        // Adjust crop region from sensor output coordinate system to active
        // array coordinate system.
        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
                scalerCropRegion[2], scalerCropRegion[3]);

        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
    }

    // Per-frame sensor exposure time.
    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
        LOGD("sensorExpTime = %lld", *sensorExpTime);
        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
    }

    // Per-frame sensor frame duration ("Fame" typo is in the HAL key's local name only).
    IF_META_AVAILABLE(int64_t, sensorFameDuration,
            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
        LOGD("sensorFameDuration = %lld", *sensorFameDuration);
        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
    }

    // Rolling shutter skew for this frame.
    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
        LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
                sensorRollingShutterSkew, 1);
    }

    // Sensor sensitivity, plus the derived per-channel noise profile.
    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
        LOGD("sensorSensitivity = %d", *sensorSensitivity);
        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

        //calculate the noise profile based on sensitivity
        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
        // One (S, O) pair per color channel, interleaved as [S, O, S, O, ...].
        // NOTE(review): runtime-sized array is a VLA, a compiler extension
        // rather than standard C++; accepted by the platform toolchain.
        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
            noise_profile[i]   = noise_profile_S;
            noise_profile[i+1] = noise_profile_O;
        }
        LOGD("noise model entry (S, O) is (%f, %f)",
                noise_profile_S, noise_profile_O);
        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
    }
5567 
    // Post-RAW sensitivity boost: defaults to 100 when the ISP sensitivity is
    // not reported; the post-stats factor, if present, is multiplied in.
    // Always published (framework expects the tag every frame).
    int32_t fwk_ispSensitivity = 100;
    IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
        fwk_ispSensitivity = (int32_t) *ispSensitivity;
    }
    IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
        fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
    }
    camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);

    // Lens shading correction mode.
    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
    }
5581 
    // Face detection results: publish the active mode, then (when not OFF)
    // face scores/rectangles, and additionally ids/landmarks in FULL mode.
    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
                *faceDetectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_faceDetectMode = (uint8_t)val;
            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);

            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
                        CAM_INTF_META_FACE_DETECTION, metadata) {
                    // Clamp the reported face count to the ROI array capacity.
                    uint8_t numFaces = MIN(
                            faceDetectionInfo->num_faces_detected, MAX_ROI);
                    int32_t faceIds[MAX_ROI];
                    uint8_t faceScores[MAX_ROI];
                    int32_t faceRectangles[MAX_ROI * 4];
                    int32_t faceLandmarks[MAX_ROI * 6];
                    // j indexes faceRectangles (4 ints/face), k indexes
                    // faceLandmarks (6 ints/face).
                    size_t j = 0, k = 0;

                    for (size_t i = 0; i < numFaces; i++) {
                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
                        // Adjust crop region from sensor output coordinate system to active
                        // array coordinate system.
                        cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
                                rect.width, rect.height);

                        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
                                faceRectangles+j, -1);

                        j+= 4;
                    }
                    // Zero-fill all result arrays when no faces were detected so
                    // the tags below publish deterministic (empty) data.
                    if (numFaces <= 0) {
                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
                    }

                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
                            numFaces);
                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
                            faceRectangles, numFaces * 4U);
                    if (fwk_faceDetectMode ==
                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                        IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
                                CAM_INTF_META_FACE_LANDMARK, metadata) {

                            for (size_t i = 0; i < numFaces; i++) {
                                // Map the co-ordinate sensor output coordinate system to active
                                // array coordinate system.
                                mCropRegionMapper.toActiveArray(
                                        landmarks->face_landmarks[i].left_eye_center.x,
                                        landmarks->face_landmarks[i].left_eye_center.y);
                                mCropRegionMapper.toActiveArray(
                                        landmarks->face_landmarks[i].right_eye_center.x,
                                        landmarks->face_landmarks[i].right_eye_center.y);
                                mCropRegionMapper.toActiveArray(
                                        landmarks->face_landmarks[i].mouth_center.x,
                                        landmarks->face_landmarks[i].mouth_center.y);

                                convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
                                k+= 6;
                            }
                        }

                        // NOTE(review): faceIds is populated only by the
                        // numFaces <= 0 memset above; when faces ARE detected
                        // it is published here without being filled in this
                        // loop - verify ids are assigned elsewhere or this
                        // publishes indeterminate values.
                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
                                faceLandmarks, numFaces * 6U);
                   }
                }
            }
        }
    }
5655 
    // Histogram statistics collection mode.
    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
    }

    // Sharpness map statistics collection mode.
    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
    }

    // Sharpness map: published at the full max map dimensions, 3 values per cell.
    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
    }

    // Lens shading map: dimensions come from static capability, clamped to the
    // HAL maximums; 4 gain values (one per Bayer channel) per grid point.
    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
                CAM_MAX_SHADING_MAP_HEIGHT);
        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
                CAM_MAX_SHADING_MAP_WIDTH);
        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
                lensShadingMap->lens_shading, 4U * map_width * map_height);
    }

    // Tonemap curve mode.
    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
    }

    // Per-channel tonemap curves; each point is an (in, out) pair, hence
    // count * 2 elements per curve.
    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
        //Populate CAM_INTF_META_TONEMAP_CURVES
        /* ch0 = G, ch 1 = B, ch 2 = R*/
        // Clamp a bogus count; note this writes back into the shared metadata
        // buffer, not a local copy.
        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     tonemap->tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }

        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
                        &tonemap->curves[0].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);

        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
                        &tonemap->curves[1].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);

        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
                        &tonemap->curves[2].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);
    }
5710 
    // Color correction gains applied this frame.
    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
                CC_GAINS_COUNT);
    }

    // Color correction transform matrix (rational entries).
    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
                CC_MATRIX_COLS * CC_MATRIX_ROWS);
    }

    // Sensor profile tone curve; same in-place count clamp as the tonemap
    // curves above, and (in, out) pairs give count * 2 elements.
    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     toneCurve->tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }
        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
                (float*)toneCurve->curve.tonemap_points,
                toneCurve->tonemap_points_cnt * 2);
    }

    // Predicted (AWB) color correction gains.
    // NOTE(review): uses literal 4 where the non-predicted gains above use
    // CC_GAINS_COUNT - presumably the same value; consider unifying.
    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
                predColorCorrectionGains->gains, 4);
    }

    // Predicted (AWB) color correction transform.
    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
                CC_MATRIX_ROWS * CC_MATRIX_COLS);
    }

    // OTP-calibrated Gr/Gb white balance split.
    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
    }

    // Black level lock state.
    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
    }

    // Detected scene flicker (e.g. 50/60Hz banding) state.
    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
    }

    // Color effect mode, mapped HAL enum -> framework enum; dropped silently
    // if the HAL value has no framework equivalent.
    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                *effectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_effectMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
        }
    }
5772 
    // Sensor test pattern mode and per-channel pattern data. R and B slots are
    // fixed; the two green slots depend on the color filter arrangement.
    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
        if (NAME_NOT_FOUND != fwk_testPatternMode) {
            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
        }
        int32_t fwk_testPatternData[4];
        fwk_testPatternData[0] = testPatternData->r;
        fwk_testPatternData[3] = testPatternData->b;
        // Place gr/gb into slots 1/2 according to the CFA layout. On an
        // unsupported arrangement slots 1 and 2 are left unassigned (only the
        // error is logged).
        switch (gCamCapability[mCameraId]->color_arrangement) {
        case CAM_FILTER_ARRANGEMENT_RGGB:
        case CAM_FILTER_ARRANGEMENT_GRBG:
            fwk_testPatternData[1] = testPatternData->gr;
            fwk_testPatternData[2] = testPatternData->gb;
            break;
        case CAM_FILTER_ARRANGEMENT_GBRG:
        case CAM_FILTER_ARRANGEMENT_BGGR:
            fwk_testPatternData[2] = testPatternData->gr;
            fwk_testPatternData[1] = testPatternData->gb;
            break;
        default:
            LOGE("color arrangement %d is not supported",
                gCamCapability[mCameraId]->color_arrangement);
            break;
        }
        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
    }
5801 
    // GPS coordinates (lat, long, alt) for JPEG EXIF.
    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    }

    // GPS processing method string for JPEG EXIF.
    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
        String8 str((const char *)gps_methods);
        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    }

    // GPS timestamp for JPEG EXIF.
    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    }

    // JPEG orientation as requested.
    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    }

    // JPEG quality (HAL uint32_t narrowed to the framework's uint8_t tag).
    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    }

    // JPEG thumbnail quality.
    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    }

    // JPEG thumbnail size as (width, height).
    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    }

    // Skip reprocess metadata for high speed mode.
    if (mBatchSize == 0) {
        // Opaque vendor private data blob, forwarded for reprocess requests.
        IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
            camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
                     privateData,
                     MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
        }
    }
5844 
    // Serialize the tuning parameters into a flat on-stack blob and publish it
    // as a vendor tag. Layout: six uint32_t header fields (version + five
    // section sizes) followed by the sensor/VFE/CPP/CAC payload sections, each
    // clamped to its compile-time maximum. `data` walks forward through the
    // blob as each piece is appended.
    if (metadata->is_tuning_params_valid) {
        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;


        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
                sizeof(uint32_t));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
                sizeof(uint32_t));
        LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
                sizeof(uint32_t));
        LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
                sizeof(uint32_t));
        LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
                sizeof(uint32_t));
        LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        // mod3 is not carried in this blob: its size field is forced to zero
        // and no mod3 payload is copied below.
        metadata->tuning_params.tuning_mod3_data_size = 0;
        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
                sizeof(uint32_t));
        LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
        data += sizeof(uint32_t);

        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
                TUNING_SENSOR_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
                count);
        data += count;

        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
                TUNING_VFE_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
                count);
        data += count;

        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
                TUNING_CPP_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
                count);
        data += count;

        count = MIN(metadata->tuning_params.tuning_cac_data_size,
                TUNING_CAC_DATA_MAX);
        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
                count);
        data += count;

        // NOTE(review): element count is total bytes / sizeof(uint32_t) with
        // integer division - if the summed section sizes are not a multiple of
        // 4, the trailing remainder bytes are silently dropped from the
        // published blob; confirm sizes are 4-byte aligned upstream.
        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
                (int32_t *)(void *)tuning_meta_data_blob,
                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
    }
5909 
    // Sensor neutral color point (rational values).
    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
                NEUTRAL_COL_POINTS);
    }

    // Whether the lens shading map is included in the result.
    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
    }

    // AE metering region, remapped into active-array coordinates before being
    // converted to the framework's (xmin, ymin, xmax, ymax, weight) tuple.
    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
        int32_t aeRegions[REGIONS_TUPLE_COUNT];
        // Adjust crop region from sensor output coordinate system to active
        // array coordinate system.
        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
                hAeRegions->rect.width, hAeRegions->rect.height);

        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
                REGIONS_TUPLE_COUNT);
        LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
                 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
                hAeRegions->rect.height);
    }

    // AF mode, mapped HAL enum -> framework enum; unmapped values are logged
    // and dropped.
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
        } else {
            LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
                    val);
        }
    }
5949 
    // Current autofocus state machine state.
    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
        uint8_t fwk_afState = (uint8_t) *afState;
        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
        LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
    }

    // Current lens focus distance.
    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
    }

    // Current focus range (near, far) pair.
    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
    }

    // Lens moving/stationary state.
    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
        uint8_t fwk_lensState = *lensState;
        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
    }

    // AF metering region, remapped into active-array coordinates, same flow as
    // the AE region above.
    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
        /*af regions*/
        int32_t afRegions[REGIONS_TUPLE_COUNT];
        // Adjust crop region from sensor output coordinate system to active
        // array coordinate system.
        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
                hAfRegions->rect.width, hAfRegions->rect.height);

        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
                REGIONS_TUPLE_COUNT);
        LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
                 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
                hAfRegions->rect.height);
    }

    // Antibanding: collapse the HAL's 50Hz/60Hz auto sub-modes into plain AUTO
    // before mapping to the framework enum.
    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
        uint32_t ab_mode = *hal_ab_mode;
        if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
                ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
              ab_mode = CAM_ANTIBANDING_MODE_AUTO;
        }
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                ab_mode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_ab_mode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
        }
    }

    // Scene ("bestshot") mode, mapped HAL enum -> framework enum.
    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
        int val = lookupFwkName(SCENE_MODES_MAP,
                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkBestshotMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
            LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
        } else {
            LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
        }
    }

    // Top-level control mode (AUTO / USE_SCENE_MODE / OFF).
    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
         uint8_t fwk_mode = (uint8_t) *mode;
         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
    }
6016 
    /* Constant metadata values to be update*/
    // Hot pixel correction is always reported as FAST.
    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);

    // Hot pixel map generation is always reported OFF.
    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);

    // Hot pixel map tag is published with zero entries (no map data).
    int32_t hotPixelMap[2];
    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);

    // CDS
    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
    }

    // TNR
    // Temporal noise reduction enable flag and processing plate type,
    // published as vendor tags.
    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
        uint8_t tnr_enable       = tnr->denoise_enable;
        int32_t tnr_process_type = (int32_t)tnr->process_plates;

        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
    }
6040 
    // Reprocess crop data
    // Find the crop entry matching the reprocessible output stream and publish
    // it (plus its ROI map) as vendor tags. Only the first matching stream is
    // recorded (the loop breaks after a match).
    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
        uint8_t cnt = crop_data->num_of_streams;
        // cnt is unsigned, so (0 >= cnt) is effectively a cnt == 0 check.
        if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
            // mm-qcamera-daemon only posts crop_data for streams
            // not linked to pproc. So no valid crop metadata is not
            // necessarily an error case.
            LOGD("No valid crop metadata entries");
        } else {
            uint32_t reproc_stream_id;
            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
                LOGD("No reprocessible stream found, ignore crop data");
            } else {
                int rc = NO_ERROR;
                Vector<int32_t> roi_map;
                // NOTE(review): plain operator new[] throws std::bad_alloc on
                // failure rather than returning NULL, so this NULL check is
                // dead code unless the build disables exceptions - confirm
                // toolchain behavior (or use new (std::nothrow)).
                int32_t *crop = new int32_t[cnt*4];
                if (NULL == crop) {
                   rc = NO_MEMORY;
                }
                if (NO_ERROR == rc) {
                    int32_t streams_found = 0;
                    for (size_t i = 0; i < cnt; i++) {
                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
                            if (pprocDone) {
                                // HAL already does internal reprocessing,
                                // either via reprocessing before JPEG encoding,
                                // or offline postprocessing for pproc bypass case.
                                crop[0] = 0;
                                crop[1] = 0;
                                crop[2] = mInputStreamInfo.dim.width;
                                crop[3] = mInputStreamInfo.dim.height;
                            } else {
                                crop[0] = crop_data->crop_info[i].crop.left;
                                crop[1] = crop_data->crop_info[i].crop.top;
                                crop[2] = crop_data->crop_info[i].crop.width;
                                crop[3] = crop_data->crop_info[i].crop.height;
                            }
                            roi_map.add(crop_data->crop_info[i].roi_map.left);
                            roi_map.add(crop_data->crop_info[i].roi_map.top);
                            roi_map.add(crop_data->crop_info[i].roi_map.width);
                            roi_map.add(crop_data->crop_info[i].roi_map.height);
                            streams_found++;
                            LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
                                    crop[0], crop[1], crop[2], crop[3]);
                            LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
                                    crop_data->crop_info[i].roi_map.left,
                                    crop_data->crop_info[i].roi_map.top,
                                    crop_data->crop_info[i].roi_map.width,
                                    crop_data->crop_info[i].roi_map.height);
                            break;

                       }
                    }
                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
                            &streams_found, 1);
                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
                            crop, (size_t)(streams_found * 4));
                    if (roi_map.array()) {
                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
                                roi_map.array(), roi_map.size());
                    }
               }
               if (crop) {
                   delete [] crop;
               }
            }
        }
    }
6109 
6110     if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
6111         // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
6112         // so hardcoding the CAC result to OFF mode.
6113         uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
6114         camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
6115     } else {
6116         IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
6117             int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6118                     *cacMode);
6119             if (NAME_NOT_FOUND != val) {
6120                 uint8_t resultCacMode = (uint8_t)val;
6121                 // check whether CAC result from CB is equal to Framework set CAC mode
6122                 // If not equal then set the CAC mode came in corresponding request
6123                 if (fwk_cacMode != resultCacMode) {
6124                     resultCacMode = fwk_cacMode;
6125                 }
6126                 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
6127                 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
6128             } else {
6129                 LOGE("Invalid CAC camera parameter: %d", *cacMode);
6130             }
6131         }
6132     }
6133 
6134     // Post blob of cam_cds_data through vendor tag.
6135     IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
6136         uint8_t cnt = cdsInfo->num_of_streams;
6137         cam_cds_data_t cdsDataOverride;
6138         memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
6139         cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
6140         cdsDataOverride.num_of_streams = 1;
6141         if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
6142             uint32_t reproc_stream_id;
6143             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6144                 LOGD("No reprocessible stream found, ignore cds data");
6145             } else {
6146                 for (size_t i = 0; i < cnt; i++) {
6147                     if (cdsInfo->cds_info[i].stream_id ==
6148                             reproc_stream_id) {
6149                         cdsDataOverride.cds_info[0].cds_enable =
6150                                 cdsInfo->cds_info[i].cds_enable;
6151                         break;
6152                     }
6153                 }
6154             }
6155         } else {
6156             LOGD("Invalid stream count %d in CDS_DATA", cnt);
6157         }
6158         camMetadata.update(QCAMERA3_CDS_INFO,
6159                 (uint8_t *)&cdsDataOverride,
6160                 sizeof(cam_cds_data_t));
6161     }
6162 
6163     // Ldaf calibration data
6164     if (!mLdafCalibExist) {
6165         IF_META_AVAILABLE(uint32_t, ldafCalib,
6166                 CAM_INTF_META_LDAF_EXIF, metadata) {
6167             mLdafCalibExist = true;
6168             mLdafCalib[0] = ldafCalib[0];
6169             mLdafCalib[1] = ldafCalib[1];
6170             LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
6171                     ldafCalib[0], ldafCalib[1]);
6172         }
6173     }
6174 
6175     // AF scene change
6176     IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
6177         camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
6178     }
6179 
6180     resultMetadata = camMetadata.release();
6181     return resultMetadata;
6182 }
6183 
6184 /*===========================================================================
6185  * FUNCTION   : saveExifParams
6186  *
 * DESCRIPTION: Cache the 3A/stats EXIF debug blobs delivered in a metadata
 *              callback into mExifParams for later embedding by the JPEG encoder.
6188  *
6189  * PARAMETERS :
6190  *   @metadata : metadata information from callback
6191  *
6192  * RETURN     : none
6193  *
6194  *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // For every EXIF debug section present in this metadata buffer, copy it
    // into mExifParams.debug_params and mark the matching *_valid flag, so
    // the JPEG encoder can embed the latest 3A/stats debug data. Each copy is
    // gated on debug_params being allocated (it may be NULL when the debug
    // dump feature is disabled).

    // AE (auto exposure) debug data
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
            mExifParams.debug_params->ae_debug_params_valid = TRUE;
        }
    }
    // AWB (auto white balance) debug data
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
            mExifParams.debug_params->awb_debug_params_valid = TRUE;
        }
    }
    // AF (auto focus) debug data
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
            mExifParams.debug_params->af_debug_params_valid = TRUE;
        }
    }
    // ASD (auto scene detection) debug data
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
            mExifParams.debug_params->asd_debug_params_valid = TRUE;
        }
    }
    // Generic stats buffer debug data
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
            mExifParams.debug_params->stats_debug_params_valid = TRUE;
        }
    }
    // Bayer-exposure stats debug data
    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
        }
    }
    // Bayer histogram debug data
    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
        }
    }
    // 3A tuning debug data
    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
        }
    }
}
6254 
6255 /*===========================================================================
6256  * FUNCTION   : get3AExifParams
6257  *
 * DESCRIPTION: Accessor for the 3A EXIF parameters cached by saveExifParams.
6259  *
6260  * PARAMETERS : none
6261  *
6262  *
6263  * RETURN     : mm_jpeg_exif_params_t
6264  *
6265  *==========================================================================*/
get3AExifParams()6266 mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
6267 {
6268     return mExifParams;
6269 }
6270 
6271 /*===========================================================================
6272  * FUNCTION   : translateCbUrgentMetadataToResultMetadata
6273  *
 * DESCRIPTION: Translate the urgent (partial 3A) metadata received from the
 *              HAL into framework-format result metadata.
6275  *
6276  * PARAMETERS :
6277  *   @metadata : metadata information from callback
6278  *   @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
6279  *                               urgent metadata in a batch. Always true for
6280  *                               non-batch mode.
6281  *
6282  * RETURN     : camera_metadata_t*
6283  *              metadata in a format specified by fwk
6284  *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;

    if (!lastUrgentMetadataInBatch) {
        /* In batch mode, use empty metadata if this is not the last in batch
         */
        resultMetadata = allocate_camera_metadata(0, 0);
        return resultMetadata;
    }

    // AWB state (searching/converged/locked)
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger echo (trigger value + framework-supplied id)
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state (searching/converged/flash required/precapture/locked)
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF trigger echo (trigger value + framework-supplied id)
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // AWB mode: map HAL white-balance value back to the framework enum
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // Deduce ANDROID_CONTROL_AE_MODE from three HAL values. The HAL has no
    // single AE-mode tag; the framework value is reconstructed from red-eye
    // reduction, LED flash mode, and the basic AE on/off state, checked in
    // that priority order below.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        // Red-eye reduction enabled implies ON_AUTO_FLASH_REDEYE regardless
        // of the other values.
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        // Flash auto/on maps to ON_AUTO_FLASH / ON_ALWAYS_FLASH via the table.
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
        fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the inputs matched a known combination; AE mode is omitted
        // from this result.
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }

    resultMetadata = camMetadata.release();
    return resultMetadata;
}
6388 
6389 /*===========================================================================
6390  * FUNCTION   : dumpMetadataToFile
6391  *
6392  * DESCRIPTION: Dumps tuning metadata to file system
6393  *
6394  * PARAMETERS :
6395  *   @meta           : tuning metadata
6396  *   @dumpFrameCount : current dump frame count
6397  *   @enabled        : Enable mask
6398  *
6399  *==========================================================================*/
dumpMetadataToFile(tuning_params_t & meta,uint32_t & dumpFrameCount,bool enabled,const char * type,uint32_t frameNumber)6400 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
6401                                                    uint32_t &dumpFrameCount,
6402                                                    bool enabled,
6403                                                    const char *type,
6404                                                    uint32_t frameNumber)
6405 {
6406     //Some sanity checks
6407     if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
6408         LOGE("Tuning sensor data size bigger than expected %d: %d",
6409               meta.tuning_sensor_data_size,
6410               TUNING_SENSOR_DATA_MAX);
6411         return;
6412     }
6413 
6414     if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
6415         LOGE("Tuning VFE data size bigger than expected %d: %d",
6416               meta.tuning_vfe_data_size,
6417               TUNING_VFE_DATA_MAX);
6418         return;
6419     }
6420 
6421     if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
6422         LOGE("Tuning CPP data size bigger than expected %d: %d",
6423               meta.tuning_cpp_data_size,
6424               TUNING_CPP_DATA_MAX);
6425         return;
6426     }
6427 
6428     if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
6429         LOGE("Tuning CAC data size bigger than expected %d: %d",
6430               meta.tuning_cac_data_size,
6431               TUNING_CAC_DATA_MAX);
6432         return;
6433     }
6434     //
6435 
6436     if(enabled){
6437         char timeBuf[FILENAME_MAX];
6438         char buf[FILENAME_MAX];
6439         memset(buf, 0, sizeof(buf));
6440         memset(timeBuf, 0, sizeof(timeBuf));
6441         time_t current_time;
6442         struct tm * timeinfo;
6443         time (&current_time);
6444         timeinfo = localtime (&current_time);
6445         if (timeinfo != NULL) {
6446             /* Consistent naming for Jpeg+meta+raw: meta name */
6447             strftime (timeBuf, sizeof(timeBuf),
6448                     QCAMERA_DUMP_FRM_LOCATION"IMG_%Y%m%d_%H%M%S", timeinfo);
6449             /* Consistent naming for Jpeg+meta+raw: meta name end*/
6450         }
6451         String8 filePath(timeBuf);
6452          /* Consistent naming for Jpeg+meta+raw */
6453         snprintf(buf,
6454                 sizeof(buf),
6455                 "%dm_%s_%d.bin",
6456                 dumpFrameCount,
6457                 type,
6458                 frameNumber);
6459          /* Consistent naming for Jpeg+meta+raw end */
6460         filePath.append(buf);
6461         int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
6462         if (file_fd >= 0) {
6463             ssize_t written_len = 0;
6464             meta.tuning_data_version = TUNING_DATA_VERSION;
6465             void *data = (void *)((uint8_t *)&meta.tuning_data_version);
6466             written_len += write(file_fd, data, sizeof(uint32_t));
6467             data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
6468             LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6469             written_len += write(file_fd, data, sizeof(uint32_t));
6470             data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
6471             LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6472             written_len += write(file_fd, data, sizeof(uint32_t));
6473             data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
6474             LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6475             written_len += write(file_fd, data, sizeof(uint32_t));
6476             data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
6477             LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6478             written_len += write(file_fd, data, sizeof(uint32_t));
6479             meta.tuning_mod3_data_size = 0;
6480             data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
6481             LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6482             written_len += write(file_fd, data, sizeof(uint32_t));
6483             size_t total_size = meta.tuning_sensor_data_size;
6484             data = (void *)((uint8_t *)&meta.data);
6485             written_len += write(file_fd, data, total_size);
6486             total_size = meta.tuning_vfe_data_size;
6487             data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
6488             written_len += write(file_fd, data, total_size);
6489             total_size = meta.tuning_cpp_data_size;
6490             data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
6491             written_len += write(file_fd, data, total_size);
6492             total_size = meta.tuning_cac_data_size;
6493             data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
6494             written_len += write(file_fd, data, total_size);
6495             close(file_fd);
6496         }else {
6497             LOGE("fail to open file for metadata dumping");
6498         }
6499     }
6500 }
6501 
6502 /*===========================================================================
6503  * FUNCTION   : cleanAndSortStreamInfo
6504  *
6505  * DESCRIPTION: helper method to clean up invalid streams in stream_info,
6506  *              and sort them such that raw stream is at the end of the list
6507  *              This is a workaround for camera daemon constraint.
6508  *
6509  * PARAMETERS : None
6510  *
6511  *==========================================================================*/
cleanAndSortStreamInfo()6512 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
6513 {
6514     List<stream_info_t *> newStreamInfo;
6515 
6516     /*clean up invalid streams*/
6517     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
6518             it != mStreamInfo.end();) {
6519         if(((*it)->status) == INVALID){
6520             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
6521             delete channel;
6522             free(*it);
6523             it = mStreamInfo.erase(it);
6524         } else {
6525             it++;
6526         }
6527     }
6528 
6529     // Move preview/video/callback/snapshot streams into newList
6530     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6531             it != mStreamInfo.end();) {
6532         if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
6533                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
6534                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
6535             newStreamInfo.push_back(*it);
6536             it = mStreamInfo.erase(it);
6537         } else
6538             it++;
6539     }
6540     // Move raw streams into newList
6541     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6542             it != mStreamInfo.end();) {
6543         newStreamInfo.push_back(*it);
6544         it = mStreamInfo.erase(it);
6545     }
6546 
6547     mStreamInfo = newStreamInfo;
6548 }
6549 
6550 /*===========================================================================
6551  * FUNCTION   : extractJpegMetadata
6552  *
6553  * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
6554  *              JPEG metadata is cached in HAL, and return as part of capture
6555  *              result when metadata is returned from camera daemon.
6556  *
6557  * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
6558  *              @request:      capture request
6559  *
6560  *==========================================================================*/
extractJpegMetadata(CameraMetadata & jpegMetadata,const camera3_capture_request_t * request)6561 void QCamera3HardwareInterface::extractJpegMetadata(
6562         CameraMetadata& jpegMetadata,
6563         const camera3_capture_request_t *request)
6564 {
6565     CameraMetadata frame_settings;
6566     frame_settings = request->settings;
6567 
6568     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
6569         jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
6570                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
6571                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
6572 
6573     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
6574         jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
6575                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
6576                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
6577 
6578     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
6579         jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
6580                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
6581                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
6582 
6583     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
6584         jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
6585                 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
6586                 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
6587 
6588     if (frame_settings.exists(ANDROID_JPEG_QUALITY))
6589         jpegMetadata.update(ANDROID_JPEG_QUALITY,
6590                 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
6591                 frame_settings.find(ANDROID_JPEG_QUALITY).count);
6592 
6593     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
6594         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
6595                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
6596                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
6597 
6598     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
6599         int32_t thumbnail_size[2];
6600         thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
6601         thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
6602         if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
6603             int32_t orientation =
6604                   frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
6605             if ((orientation == 90) || (orientation == 270)) {
6606                //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
6607                int32_t temp;
6608                temp = thumbnail_size[0];
6609                thumbnail_size[0] = thumbnail_size[1];
6610                thumbnail_size[1] = temp;
6611             }
6612          }
6613          jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
6614                 thumbnail_size,
6615                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
6616     }
6617 
6618 }
6619 
6620 /*===========================================================================
6621  * FUNCTION   : convertToRegions
6622  *
6623  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
6624  *
6625  * PARAMETERS :
6626  *   @rect   : cam_rect_t struct to convert
6627  *   @region : int32_t destination array
6628  *   @weight : if we are converting from cam_area_t, weight is valid
6629  *             else weight = -1
6630  *
6631  *==========================================================================*/
convertToRegions(cam_rect_t rect,int32_t * region,int weight)6632 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
6633         int32_t *region, int weight)
6634 {
6635     region[0] = rect.left;
6636     region[1] = rect.top;
6637     region[2] = rect.left + rect.width;
6638     region[3] = rect.top + rect.height;
6639     if (weight > -1) {
6640         region[4] = weight;
6641     }
6642 }
6643 
6644 /*===========================================================================
6645  * FUNCTION   : convertFromRegions
6646  *
6647  * DESCRIPTION: helper method to convert from array to cam_rect_t
6648  *
6649  * PARAMETERS :
6650  *   @rect   : cam_rect_t struct to convert
6651  *   @region : int32_t destination array
6652  *   @weight : if we are converting from cam_area_t, weight is valid
6653  *             else weight = -1
6654  *
6655  *==========================================================================*/
convertFromRegions(cam_area_t & roi,const camera_metadata_t * settings,uint32_t tag)6656 void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
6657         const camera_metadata_t *settings, uint32_t tag)
6658 {
6659     CameraMetadata frame_settings;
6660     frame_settings = settings;
6661     int32_t x_min = frame_settings.find(tag).data.i32[0];
6662     int32_t y_min = frame_settings.find(tag).data.i32[1];
6663     int32_t x_max = frame_settings.find(tag).data.i32[2];
6664     int32_t y_max = frame_settings.find(tag).data.i32[3];
6665     roi.weight = frame_settings.find(tag).data.i32[4];
6666     roi.rect.left = x_min;
6667     roi.rect.top = y_min;
6668     roi.rect.width = x_max - x_min;
6669     roi.rect.height = y_max - y_min;
6670 }
6671 
6672 /*===========================================================================
6673  * FUNCTION   : resetIfNeededROI
6674  *
6675  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
6676  *              crop region
6677  *
6678  * PARAMETERS :
6679  *   @roi       : cam_area_t struct to resize
6680  *   @scalerCropRegion : cam_crop_region_t region to compare against
6681  *
6682  *
6683  *==========================================================================*/
bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
                                                 const cam_crop_region_t* scalerCropRegion)
{
    // Returns false when the ROI lies entirely outside the scaler crop region
    // (caller should reset/ignore it); otherwise clamps the ROI to the crop
    // region in place and returns true.
    int32_t roi_x_max = roi->rect.width + roi->rect.left;
    int32_t roi_y_max = roi->rect.height + roi->rect.top;
    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;

    /* According to spec weight = 0 is used to indicate roi needs to be disabled
     * without having this check the calculations below to validate if the roi
     * is inside scalar crop region will fail resulting in the roi not being
     * reset causing algorithm to continue to use stale roi window
     */
    if (roi->weight == 0) {
        return true;
    }

    // Reject the ROI if it does not intersect the crop region at all.
    if ((roi_x_max < scalerCropRegion->left) ||
        // right edge of roi window is left of scalar crop's left edge
        (roi_y_max < scalerCropRegion->top)  ||
        // bottom edge of roi window is above scalar crop's top edge
        (roi->rect.left > crop_x_max) ||
        // left edge of roi window is beyond(right) of scalar crop's right edge
        (roi->rect.top > crop_y_max)){
        // top edge of roi window is below scalar crop's bottom edge
        return false;
    }
    // Clamp each ROI edge to the crop region, then recompute width/height
    // from the clamped corners.
    if (roi->rect.left < scalerCropRegion->left) {
        roi->rect.left = scalerCropRegion->left;
    }
    if (roi->rect.top < scalerCropRegion->top) {
        roi->rect.top = scalerCropRegion->top;
    }
    if (roi_x_max > crop_x_max) {
        roi_x_max = crop_x_max;
    }
    if (roi_y_max > crop_y_max) {
        roi_y_max = crop_y_max;
    }
    roi->rect.width = roi_x_max - roi->rect.left;
    roi->rect.height = roi_y_max - roi->rect.top;
    return true;
}
6727 
6728 /*===========================================================================
6729  * FUNCTION   : convertLandmarks
6730  *
6731  * DESCRIPTION: helper method to extract the landmarks from face detection info
6732  *
6733  * PARAMETERS :
6734  *   @landmark_data : input landmark data to be converted
6735  *   @landmarks : int32_t destination array
6736  *
6737  *
6738  *==========================================================================*/
convertLandmarks(cam_face_landmarks_info_t landmark_data,int32_t * landmarks)6739 void QCamera3HardwareInterface::convertLandmarks(
6740         cam_face_landmarks_info_t landmark_data,
6741         int32_t *landmarks)
6742 {
6743     landmarks[0] = (int32_t)landmark_data.left_eye_center.x;
6744     landmarks[1] = (int32_t)landmark_data.left_eye_center.y;
6745     landmarks[2] = (int32_t)landmark_data.right_eye_center.x;
6746     landmarks[3] = (int32_t)landmark_data.right_eye_center.y;
6747     landmarks[4] = (int32_t)landmark_data.mouth_center.x;
6748     landmarks[5] = (int32_t)landmark_data.mouth_center.y;
6749 }
6750 
6751 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
6752 /*===========================================================================
6753  * FUNCTION   : initCapabilities
6754  *
6755  * DESCRIPTION: initialize camera capabilities in static data struct
6756  *
6757  * PARAMETERS :
6758  *   @cameraId  : camera Id
6759  *
6760  * RETURN     : int32_t type of status
6761  *              NO_ERROR  -- success
6762  *              none-zero failure code
6763  *==========================================================================*/
initCapabilities(uint32_t cameraId)6764 int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
6765 {
6766     int rc = 0;
6767     mm_camera_vtbl_t *cameraHandle = NULL;
6768     QCamera3HeapMemory *capabilityHeap = NULL;
6769 
6770     rc = camera_open((uint8_t)cameraId, &cameraHandle);
6771     if (rc) {
6772         LOGE("camera_open failed. rc = %d", rc);
6773         goto open_failed;
6774     }
6775     if (!cameraHandle) {
6776         LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
6777         goto open_failed;
6778     }
6779 
6780     capabilityHeap = new QCamera3HeapMemory(1);
6781     if (capabilityHeap == NULL) {
6782         LOGE("creation of capabilityHeap failed");
6783         goto heap_creation_failed;
6784     }
6785     /* Allocate memory for capability buffer */
6786     rc = capabilityHeap->allocate(sizeof(cam_capability_t));
6787     if(rc != OK) {
6788         LOGE("No memory for cappability");
6789         goto allocate_failed;
6790     }
6791 
6792     /* Map memory for capability buffer */
6793     memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
6794     rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
6795                                 CAM_MAPPING_BUF_TYPE_CAPABILITY,
6796                                 capabilityHeap->getFd(0),
6797                                 sizeof(cam_capability_t),
6798                                 capabilityHeap->getPtr(0));
6799     if(rc < 0) {
6800         LOGE("failed to map capability buffer");
6801         goto map_failed;
6802     }
6803 
6804     /* Query Capability */
6805     rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
6806     if(rc < 0) {
6807         LOGE("failed to query capability");
6808         goto query_failed;
6809     }
6810     gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
6811     if (!gCamCapability[cameraId]) {
6812         LOGE("out of memory");
6813         goto query_failed;
6814     }
6815     memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
6816                                         sizeof(cam_capability_t));
6817 
6818     int index;
6819     for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
6820         cam_analysis_info_t *p_analysis_info =
6821                 &gCamCapability[cameraId]->analysis_info[index];
6822         p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
6823         p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
6824     }
6825     rc = 0;
6826 
6827 query_failed:
6828     cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
6829                             CAM_MAPPING_BUF_TYPE_CAPABILITY);
6830 map_failed:
6831     capabilityHeap->deallocate();
6832 allocate_failed:
6833     delete capabilityHeap;
6834 heap_creation_failed:
6835     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
6836     cameraHandle = NULL;
6837 open_failed:
6838     return rc;
6839 }
6840 
6841 /*==========================================================================
6842  * FUNCTION   : get3Aversion
6843  *
6844  * DESCRIPTION: get the Q3A S/W version
6845  *
6846  * PARAMETERS :
6847  *  @sw_version: Reference of Q3A structure which will hold version info upon
6848  *               return
6849  *
6850  * RETURN     : None
6851  *
6852  *==========================================================================*/
get3AVersion(cam_q3a_version_t & sw_version)6853 void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
6854 {
6855     if(gCamCapability[mCameraId])
6856         sw_version = gCamCapability[mCameraId]->q3a_version;
6857     else
6858         LOGE("Capability structure NULL!");
6859 }
6860 
6861 
6862 /*===========================================================================
6863  * FUNCTION   : initParameters
6864  *
6865  * DESCRIPTION: initialize camera parameters
6866  *
6867  * PARAMETERS :
6868  *
6869  * RETURN     : int32_t type of status
6870  *              NO_ERROR  -- success
6871  *              none-zero failure code
6872  *==========================================================================*/
initParameters()6873 int QCamera3HardwareInterface::initParameters()
6874 {
6875     int rc = 0;
6876 
6877     //Allocate Set Param Buffer
6878     mParamHeap = new QCamera3HeapMemory(1);
6879     rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
6880     if(rc != OK) {
6881         rc = NO_MEMORY;
6882         LOGE("Failed to allocate SETPARM Heap memory");
6883         delete mParamHeap;
6884         mParamHeap = NULL;
6885         return rc;
6886     }
6887 
6888     //Map memory for parameters buffer
6889     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
6890             CAM_MAPPING_BUF_TYPE_PARM_BUF,
6891             mParamHeap->getFd(0),
6892             sizeof(metadata_buffer_t),
6893             (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
6894     if(rc < 0) {
6895         LOGE("failed to map SETPARM buffer");
6896         rc = FAILED_TRANSACTION;
6897         mParamHeap->deallocate();
6898         delete mParamHeap;
6899         mParamHeap = NULL;
6900         return rc;
6901     }
6902 
6903     mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
6904 
6905     mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
6906     return rc;
6907 }
6908 
6909 /*===========================================================================
6910  * FUNCTION   : deinitParameters
6911  *
6912  * DESCRIPTION: de-initialize camera parameters
6913  *
6914  * PARAMETERS :
6915  *
6916  * RETURN     : NONE
6917  *==========================================================================*/
deinitParameters()6918 void QCamera3HardwareInterface::deinitParameters()
6919 {
6920     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
6921             CAM_MAPPING_BUF_TYPE_PARM_BUF);
6922 
6923     mParamHeap->deallocate();
6924     delete mParamHeap;
6925     mParamHeap = NULL;
6926 
6927     mParameters = NULL;
6928 
6929     free(mPrevParameters);
6930     mPrevParameters = NULL;
6931 }
6932 
6933 /*===========================================================================
6934  * FUNCTION   : calcMaxJpegSize
6935  *
6936  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
6937  *
6938  * PARAMETERS :
6939  *
6940  * RETURN     : max_jpeg_size
6941  *==========================================================================*/
calcMaxJpegSize(uint32_t camera_id)6942 size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
6943 {
6944     size_t max_jpeg_size = 0;
6945     size_t temp_width, temp_height;
6946     size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
6947             MAX_SIZES_CNT);
6948     for (size_t i = 0; i < count; i++) {
6949         temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
6950         temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
6951         if (temp_width * temp_height > max_jpeg_size ) {
6952             max_jpeg_size = temp_width * temp_height;
6953         }
6954     }
6955     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
6956     return max_jpeg_size;
6957 }
6958 
6959 /*===========================================================================
6960  * FUNCTION   : getMaxRawSize
6961  *
6962  * DESCRIPTION: Fetches maximum raw size supported by the cameraId
6963  *
6964  * PARAMETERS :
6965  *
6966  * RETURN     : Largest supported Raw Dimension
6967  *==========================================================================*/
getMaxRawSize(uint32_t camera_id)6968 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
6969 {
6970     int max_width = 0;
6971     cam_dimension_t maxRawSize;
6972 
6973     memset(&maxRawSize, 0, sizeof(cam_dimension_t));
6974     for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
6975         if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
6976             max_width = gCamCapability[camera_id]->raw_dim[i].width;
6977             maxRawSize = gCamCapability[camera_id]->raw_dim[i];
6978         }
6979     }
6980     return maxRawSize;
6981 }
6982 
6983 
6984 /*===========================================================================
6985  * FUNCTION   : calcMaxJpegDim
6986  *
6987  * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
6988  *
6989  * PARAMETERS :
6990  *
6991  * RETURN     : max_jpeg_dim
6992  *==========================================================================*/
calcMaxJpegDim()6993 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
6994 {
6995     cam_dimension_t max_jpeg_dim;
6996     cam_dimension_t curr_jpeg_dim;
6997     max_jpeg_dim.width = 0;
6998     max_jpeg_dim.height = 0;
6999     curr_jpeg_dim.width = 0;
7000     curr_jpeg_dim.height = 0;
7001     for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
7002         curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
7003         curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
7004         if (curr_jpeg_dim.width * curr_jpeg_dim.height >
7005             max_jpeg_dim.width * max_jpeg_dim.height ) {
7006             max_jpeg_dim.width = curr_jpeg_dim.width;
7007             max_jpeg_dim.height = curr_jpeg_dim.height;
7008         }
7009     }
7010     return max_jpeg_dim;
7011 }
7012 
7013 /*===========================================================================
7014  * FUNCTION   : addStreamConfig
7015  *
7016  * DESCRIPTION: adds the stream configuration to the array
7017  *
7018  * PARAMETERS :
7019  * @available_stream_configs : pointer to stream configuration array
7020  * @scalar_format            : scalar format
7021  * @dim                      : configuration dimension
7022  * @config_type              : input or output configuration type
7023  *
7024  * RETURN     : NONE
7025  *==========================================================================*/
addStreamConfig(Vector<int32_t> & available_stream_configs,int32_t scalar_format,const cam_dimension_t & dim,int32_t config_type)7026 void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
7027         int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
7028 {
7029     available_stream_configs.add(scalar_format);
7030     available_stream_configs.add(dim.width);
7031     available_stream_configs.add(dim.height);
7032     available_stream_configs.add(config_type);
7033 }
7034 
7035 /*===========================================================================
7036  * FUNCTION   : suppportBurstCapture
7037  *
7038  * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
7039  *
7040  * PARAMETERS :
7041  *   @cameraId  : camera Id
7042  *
7043  * RETURN     : true if camera supports BURST_CAPTURE
7044  *              false otherwise
7045  *==========================================================================*/
supportBurstCapture(uint32_t cameraId)7046 bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
7047 {
7048     const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
7049     const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
7050     const int32_t highResWidth = 3264;
7051     const int32_t highResHeight = 2448;
7052 
7053     if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
7054         // Maximum resolution images cannot be captured at >= 10fps
7055         // -> not supporting BURST_CAPTURE
7056         return false;
7057     }
7058 
7059     if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
7060         // Maximum resolution images can be captured at >= 20fps
7061         // --> supporting BURST_CAPTURE
7062         return true;
7063     }
7064 
7065     // Find the smallest highRes resolution, or largest resolution if there is none
7066     size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
7067             MAX_SIZES_CNT);
7068     size_t highRes = 0;
7069     while ((highRes + 1 < totalCnt) &&
7070             (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
7071             gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
7072             highResWidth * highResHeight)) {
7073         highRes++;
7074     }
7075     if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
7076         return true;
7077     } else {
7078         return false;
7079     }
7080 }
7081 
7082 /*===========================================================================
7083  * FUNCTION   : initStaticMetadata
7084  *
7085  * DESCRIPTION: initialize the static metadata
7086  *
7087  * PARAMETERS :
7088  *   @cameraId  : camera Id
7089  *
7090  * RETURN     : int32_t type of status
7091  *              0  -- success
7092  *              non-zero failure code
7093  *==========================================================================*/
initStaticMetadata(uint32_t cameraId)7094 int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
7095 {
7096     int rc = 0;
7097     CameraMetadata staticInfo;
7098     size_t count = 0;
7099     bool limitedDevice = false;
7100     char prop[PROPERTY_VALUE_MAX];
7101     bool supportBurst = false;
7102 
7103     supportBurst = supportBurstCapture(cameraId);
7104 
7105     /* If sensor is YUV sensor (no raw support) or if per-frame control is not
7106      * guaranteed or if min fps of max resolution is less than 20 fps, its
7107      * advertised as limited device*/
7108     limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
7109             (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
7110             (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
7111             !supportBurst;
7112 
7113     uint8_t supportedHwLvl = limitedDevice ?
7114             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
7115             // LEVEL_3 - This device will support level 3.
7116             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
7117 
7118     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7119             &supportedHwLvl, 1);
7120 
7121     bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
7122     /*HAL 3 only*/
7123     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7124                     &gCamCapability[cameraId]->min_focus_distance, 1);
7125 
7126     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
7127                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
7128 
7129     /*should be using focal lengths but sensor doesn't provide that info now*/
7130     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7131                       &gCamCapability[cameraId]->focal_length,
7132                       1);
7133 
7134     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7135             gCamCapability[cameraId]->apertures,
7136             MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
7137 
7138     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7139             gCamCapability[cameraId]->filter_densities,
7140             MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
7141 
7142 
7143     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7144             (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
7145             MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
7146 
7147     int32_t lens_shading_map_size[] = {
7148             MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
7149             MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
7150     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
7151                       lens_shading_map_size,
7152                       sizeof(lens_shading_map_size)/sizeof(int32_t));
7153 
7154     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
7155             gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
7156 
7157     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
7158             gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
7159 
7160     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7161             &gCamCapability[cameraId]->max_frame_duration, 1);
7162 
7163     camera_metadata_rational baseGainFactor = {
7164             gCamCapability[cameraId]->base_gain_factor.numerator,
7165             gCamCapability[cameraId]->base_gain_factor.denominator};
7166     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
7167                       &baseGainFactor, 1);
7168 
7169     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7170                      (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
7171 
7172     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
7173             gCamCapability[cameraId]->pixel_array_size.height};
7174     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7175                       pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
7176 
7177     int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
7178             gCamCapability[cameraId]->active_array_size.top,
7179             gCamCapability[cameraId]->active_array_size.width,
7180             gCamCapability[cameraId]->active_array_size.height};
7181     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7182             active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
7183 
7184     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
7185             &gCamCapability[cameraId]->white_level, 1);
7186 
7187     int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
7188     adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
7189             gCamCapability[cameraId]->color_arrangement);
7190     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
7191             adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
7192 
7193     bool hasBlackRegions = false;
7194     if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
7195         LOGW("black_region_count: %d is bounded to %d",
7196             gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
7197         gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
7198     }
7199     if (gCamCapability[cameraId]->optical_black_region_count != 0) {
7200         int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
7201         for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
7202             opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
7203         }
7204         staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
7205                 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
7206         hasBlackRegions = true;
7207     }
7208 
7209     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
7210             &gCamCapability[cameraId]->flash_charge_duration, 1);
7211 
7212     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
7213             &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
7214 
7215     uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
7216             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
7217             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
7218     staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7219             &timestampSource, 1);
7220 
7221     staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7222             &gCamCapability[cameraId]->histogram_size, 1);
7223 
7224     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7225             &gCamCapability[cameraId]->max_histogram_count, 1);
7226 
7227     int32_t sharpness_map_size[] = {
7228             gCamCapability[cameraId]->sharpness_map_size.width,
7229             gCamCapability[cameraId]->sharpness_map_size.height};
7230 
7231     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
7232             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
7233 
7234     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7235             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
7236 
7237     int32_t scalar_formats[] = {
7238             ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
7239             ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
7240             ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
7241             ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
7242             HAL_PIXEL_FORMAT_RAW10,
7243             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
7244     size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
7245     staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
7246                       scalar_formats,
7247                       scalar_formats_count);
7248 
7249     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
7250     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
7251     makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
7252             count, MAX_SIZES_CNT, available_processed_sizes);
7253     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
7254             available_processed_sizes, count * 2);
7255 
7256     int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
7257     count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
7258     makeTable(gCamCapability[cameraId]->raw_dim,
7259             count, MAX_SIZES_CNT, available_raw_sizes);
7260     staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
7261             available_raw_sizes, count * 2);
7262 
7263     int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
7264     count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
7265     makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
7266             count, MAX_SIZES_CNT, available_fps_ranges);
7267     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7268             available_fps_ranges, count * 2);
7269 
7270     camera_metadata_rational exposureCompensationStep = {
7271             gCamCapability[cameraId]->exp_compensation_step.numerator,
7272             gCamCapability[cameraId]->exp_compensation_step.denominator};
7273     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
7274                       &exposureCompensationStep, 1);
7275 
7276     Vector<uint8_t> availableVstabModes;
7277     availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
7278     char eis_prop[PROPERTY_VALUE_MAX];
7279     memset(eis_prop, 0, sizeof(eis_prop));
7280     property_get("persist.camera.eis.enable", eis_prop, "0");
7281     uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
7282     if (facingBack && eis_prop_set) {
7283         availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
7284     }
7285     staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7286                       availableVstabModes.array(), availableVstabModes.size());
7287 
7288     /*HAL 1 and HAL 3 common*/
7289     float maxZoom = 4;
7290     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7291             &maxZoom, 1);
7292 
7293     uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
7294     staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
7295 
7296     int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
7297     if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
7298         max3aRegions[2] = 0; /* AF not supported */
7299     staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
7300             max3aRegions, 3);
7301 
7302     /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
7303     memset(prop, 0, sizeof(prop));
7304     property_get("persist.camera.facedetect", prop, "1");
7305     uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
7306     LOGD("Support face detection mode: %d",
7307              supportedFaceDetectMode);
7308 
7309     int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
7310     Vector<uint8_t> availableFaceDetectModes;
7311     availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
7312     if (supportedFaceDetectMode == 1) {
7313         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
7314     } else if (supportedFaceDetectMode == 2) {
7315         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
7316     } else if (supportedFaceDetectMode == 3) {
7317         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
7318         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
7319     } else {
7320         maxFaces = 0;
7321     }
7322     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7323             availableFaceDetectModes.array(),
7324             availableFaceDetectModes.size());
7325     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
7326             (int32_t *)&maxFaces, 1);
7327 
7328     int32_t exposureCompensationRange[] = {
7329             gCamCapability[cameraId]->exposure_compensation_min,
7330             gCamCapability[cameraId]->exposure_compensation_max};
7331     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
7332             exposureCompensationRange,
7333             sizeof(exposureCompensationRange)/sizeof(int32_t));
7334 
7335     uint8_t lensFacing = (facingBack) ?
7336             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
7337     staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
7338 
7339     staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7340                       available_thumbnail_sizes,
7341                       sizeof(available_thumbnail_sizes)/sizeof(int32_t));
7342 
7343     /*all sizes will be clubbed into this tag*/
7344     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
7345     /*android.scaler.availableStreamConfigurations*/
7346     Vector<int32_t> available_stream_configs;
7347     cam_dimension_t active_array_dim;
7348     active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
7349     active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
7350     /* Add input/output stream configurations for each scalar formats*/
7351     for (size_t j = 0; j < scalar_formats_count; j++) {
7352         switch (scalar_formats[j]) {
7353         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
7354         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
7355         case HAL_PIXEL_FORMAT_RAW10:
7356             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7357                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7358                 addStreamConfig(available_stream_configs, scalar_formats[j],
7359                         gCamCapability[cameraId]->raw_dim[i],
7360                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7361             }
7362             break;
7363         case HAL_PIXEL_FORMAT_BLOB:
7364             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7365                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7366                 addStreamConfig(available_stream_configs, scalar_formats[j],
7367                         gCamCapability[cameraId]->picture_sizes_tbl[i],
7368                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7369             }
7370             break;
7371         case HAL_PIXEL_FORMAT_YCbCr_420_888:
7372         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
7373         default:
7374             cam_dimension_t largest_picture_size;
7375             memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
7376             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7377                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7378                 addStreamConfig(available_stream_configs, scalar_formats[j],
7379                         gCamCapability[cameraId]->picture_sizes_tbl[i],
7380                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7381                 /* Book keep largest */
7382                 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
7383                         >= largest_picture_size.width &&
7384                         gCamCapability[cameraId]->picture_sizes_tbl[i].height
7385                         >= largest_picture_size.height)
7386                     largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
7387             }
7388             /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
7389             if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
7390                     scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
7391                  addStreamConfig(available_stream_configs, scalar_formats[j],
7392                          largest_picture_size,
7393                          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
7394             }
7395             break;
7396         }
7397     }
7398 
7399     staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7400                       available_stream_configs.array(), available_stream_configs.size());
7401     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7402     staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7403 
7404     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7405     staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7406 
7407     /* android.scaler.availableMinFrameDurations */
7408     Vector<int64_t> available_min_durations;
7409     for (size_t j = 0; j < scalar_formats_count; j++) {
7410         switch (scalar_formats[j]) {
7411         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
7412         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
7413         case HAL_PIXEL_FORMAT_RAW10:
7414             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7415                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7416                 available_min_durations.add(scalar_formats[j]);
7417                 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7418                 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7419                 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
7420             }
7421             break;
7422         default:
7423             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7424                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7425                 available_min_durations.add(scalar_formats[j]);
7426                 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7427                 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7428                 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
7429             }
7430             break;
7431         }
7432     }
7433     staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
7434                       available_min_durations.array(), available_min_durations.size());
7435 
7436     Vector<int32_t> available_hfr_configs;
7437     for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
7438         int32_t fps = 0;
7439         switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
7440         case CAM_HFR_MODE_60FPS:
7441             fps = 60;
7442             break;
7443         case CAM_HFR_MODE_90FPS:
7444             fps = 90;
7445             break;
7446         case CAM_HFR_MODE_120FPS:
7447             fps = 120;
7448             break;
7449         case CAM_HFR_MODE_150FPS:
7450             fps = 150;
7451             break;
7452         case CAM_HFR_MODE_180FPS:
7453             fps = 180;
7454             break;
7455         case CAM_HFR_MODE_210FPS:
7456             fps = 210;
7457             break;
7458         case CAM_HFR_MODE_240FPS:
7459             fps = 240;
7460             break;
7461         case CAM_HFR_MODE_480FPS:
7462             fps = 480;
7463             break;
7464         case CAM_HFR_MODE_OFF:
7465         case CAM_HFR_MODE_MAX:
7466         default:
7467             break;
7468         }
7469 
7470         /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
7471         if (fps >= MIN_FPS_FOR_BATCH_MODE) {
7472             /* For each HFR frame rate, need to advertise one variable fps range
7473              * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
7474              * and [120, 120]. While camcorder preview alone is running [30, 120] is
7475              * set by the app. When video recording is started, [120, 120] is
7476              * set. This way sensor configuration does not change when recording
7477              * is started */
7478 
7479             /* (width, height, fps_min, fps_max, batch_size_max) */
7480             for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
7481                 j < MAX_SIZES_CNT; j++) {
7482                 available_hfr_configs.add(
7483                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
7484                 available_hfr_configs.add(
7485                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
7486                 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
7487                 available_hfr_configs.add(fps);
7488                 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
7489 
7490                 /* (width, height, fps_min, fps_max, batch_size_max) */
7491                 available_hfr_configs.add(
7492                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
7493                 available_hfr_configs.add(
7494                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
7495                 available_hfr_configs.add(fps);
7496                 available_hfr_configs.add(fps);
7497                 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
7498             }
7499        }
7500     }
7501     //Advertise HFR capability only if the property is set
7502     memset(prop, 0, sizeof(prop));
7503     property_get("persist.camera.hal3hfr.enable", prop, "1");
7504     uint8_t hfrEnable = (uint8_t)atoi(prop);
7505 
7506     if(hfrEnable && available_hfr_configs.array()) {
7507         staticInfo.update(
7508                 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
7509                 available_hfr_configs.array(), available_hfr_configs.size());
7510     }
7511 
7512     int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
7513     staticInfo.update(ANDROID_JPEG_MAX_SIZE,
7514                       &max_jpeg_size, 1);
7515 
7516     uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
7517     size_t size = 0;
7518     count = CAM_EFFECT_MODE_MAX;
7519     count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
7520     for (size_t i = 0; i < count; i++) {
7521         int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7522                 gCamCapability[cameraId]->supported_effects[i]);
7523         if (NAME_NOT_FOUND != val) {
7524             avail_effects[size] = (uint8_t)val;
7525             size++;
7526         }
7527     }
7528     staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
7529                       avail_effects,
7530                       size);
7531 
7532     uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
7533     uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
7534     size_t supported_scene_modes_cnt = 0;
7535     count = CAM_SCENE_MODE_MAX;
7536     count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
7537     for (size_t i = 0; i < count; i++) {
7538         if (gCamCapability[cameraId]->supported_scene_modes[i] !=
7539                 CAM_SCENE_MODE_OFF) {
7540             int val = lookupFwkName(SCENE_MODES_MAP,
7541                     METADATA_MAP_SIZE(SCENE_MODES_MAP),
7542                     gCamCapability[cameraId]->supported_scene_modes[i]);
7543             if (NAME_NOT_FOUND != val) {
7544                 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
7545                 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
7546                 supported_scene_modes_cnt++;
7547             }
7548         }
7549     }
7550     staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7551                       avail_scene_modes,
7552                       supported_scene_modes_cnt);
7553 
7554     uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
7555     makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
7556                       supported_scene_modes_cnt,
7557                       CAM_SCENE_MODE_MAX,
7558                       scene_mode_overrides,
7559                       supported_indexes,
7560                       cameraId);
7561 
7562     if (supported_scene_modes_cnt == 0) {
7563         supported_scene_modes_cnt = 1;
7564         avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
7565     }
7566 
7567     staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
7568             scene_mode_overrides, supported_scene_modes_cnt * 3);
7569 
7570     uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
7571                                          ANDROID_CONTROL_MODE_AUTO,
7572                                          ANDROID_CONTROL_MODE_USE_SCENE_MODE};
7573     staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
7574             available_control_modes,
7575             3);
7576 
7577     uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
7578     size = 0;
7579     count = CAM_ANTIBANDING_MODE_MAX;
7580     count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
7581     for (size_t i = 0; i < count; i++) {
7582         int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
7583                 gCamCapability[cameraId]->supported_antibandings[i]);
7584         if (NAME_NOT_FOUND != val) {
7585             avail_antibanding_modes[size] = (uint8_t)val;
7586             size++;
7587         }
7588 
7589     }
7590     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
7591                       avail_antibanding_modes,
7592                       size);
7593 
7594     uint8_t avail_abberation_modes[] = {
7595             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
7596             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
7597             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
7598     count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
7599     count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
7600     if (0 == count) {
7601         //  If no aberration correction modes are available for a device, advertise only the OFF mode
7602         size = 1;
7603     } else {
7604         // If count is not zero, then at least one of the FAST or HIGH_QUALITY modes is supported.
7605         // So, advertise all 3 modes if at least one mode is supported, as per the
7606         // new M requirement
7607         size = 3;
7608     }
7609     staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
7610             avail_abberation_modes,
7611             size);
7612 
7613     uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
7614     size = 0;
7615     count = CAM_FOCUS_MODE_MAX;
7616     count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
7617     for (size_t i = 0; i < count; i++) {
7618         int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7619                 gCamCapability[cameraId]->supported_focus_modes[i]);
7620         if (NAME_NOT_FOUND != val) {
7621             avail_af_modes[size] = (uint8_t)val;
7622             size++;
7623         }
7624     }
7625     staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
7626                       avail_af_modes,
7627                       size);
7628 
7629     uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
7630     size = 0;
7631     count = CAM_WB_MODE_MAX;
7632     count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
7633     for (size_t i = 0; i < count; i++) {
7634         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7635                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7636                 gCamCapability[cameraId]->supported_white_balances[i]);
7637         if (NAME_NOT_FOUND != val) {
7638             avail_awb_modes[size] = (uint8_t)val;
7639             size++;
7640         }
7641     }
7642     staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
7643                       avail_awb_modes,
7644                       size);
7645 
7646     uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
7647     count = CAM_FLASH_FIRING_LEVEL_MAX;
7648     count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
7649             count);
7650     for (size_t i = 0; i < count; i++) {
7651         available_flash_levels[i] =
7652                 gCamCapability[cameraId]->supported_firing_levels[i];
7653     }
7654     staticInfo.update(ANDROID_FLASH_FIRING_POWER,
7655             available_flash_levels, count);
7656 
7657     uint8_t flashAvailable;
7658     if (gCamCapability[cameraId]->flash_available)
7659         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
7660     else
7661         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
7662     staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
7663             &flashAvailable, 1);
7664 
7665     Vector<uint8_t> avail_ae_modes;
7666     count = CAM_AE_MODE_MAX;
7667     count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
7668     for (size_t i = 0; i < count; i++) {
7669         uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
7670         if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
7671             aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
7672         }
7673         avail_ae_modes.add(aeMode);
7674     }
7675     if (flashAvailable) {
7676         avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
7677         avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
7678     }
7679     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
7680                       avail_ae_modes.array(),
7681                       avail_ae_modes.size());
7682 
7683     int32_t sensitivity_range[2];
7684     sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
7685     sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
7686     staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
7687                       sensitivity_range,
7688                       sizeof(sensitivity_range) / sizeof(int32_t));
7689 
7690     staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7691                       &gCamCapability[cameraId]->max_analog_sensitivity,
7692                       1);
7693 
7694     int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
7695     staticInfo.update(ANDROID_SENSOR_ORIENTATION,
7696                       &sensor_orientation,
7697                       1);
7698 
7699     int32_t max_output_streams[] = {
7700             MAX_STALLING_STREAMS,
7701             MAX_PROCESSED_STREAMS,
7702             MAX_RAW_STREAMS};
7703     staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
7704             max_output_streams,
7705             sizeof(max_output_streams)/sizeof(max_output_streams[0]));
7706 
7707     uint8_t avail_leds = 0;
7708     staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
7709                       &avail_leds, 0);
7710 
7711     uint8_t focus_dist_calibrated;
7712     int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
7713             gCamCapability[cameraId]->focus_dist_calibrated);
7714     if (NAME_NOT_FOUND != val) {
7715         focus_dist_calibrated = (uint8_t)val;
7716         staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7717                      &focus_dist_calibrated, 1);
7718     }
7719 
7720     int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
7721     size = 0;
7722     count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
7723             MAX_TEST_PATTERN_CNT);
7724     for (size_t i = 0; i < count; i++) {
7725         int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
7726                 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
7727         if (NAME_NOT_FOUND != testpatternMode) {
7728             avail_testpattern_modes[size] = testpatternMode;
7729             size++;
7730         }
7731     }
7732     staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7733                       avail_testpattern_modes,
7734                       size);
7735 
7736     uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
7737     staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
7738                       &max_pipeline_depth,
7739                       1);
7740 
7741     int32_t partial_result_count = PARTIAL_RESULT_COUNT;
7742     staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7743                       &partial_result_count,
7744                        1);
7745 
7746     int32_t max_stall_duration = MAX_REPROCESS_STALL;
7747     staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
7748 
7749     Vector<uint8_t> available_capabilities;
7750     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
7751     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
7752     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
7753     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
7754     if (supportBurst) {
7755         available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
7756     }
7757     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
7758     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
7759     if (hfrEnable && available_hfr_configs.array()) {
7760         available_capabilities.add(
7761                 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
7762     }
7763 
7764     if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
7765         available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
7766     }
7767     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7768             available_capabilities.array(),
7769             available_capabilities.size());
7770 
7771     //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
7772     //Assumption is that all bayer cameras support MANUAL_SENSOR.
7773     uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
7774             ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
7775 
7776     staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7777             &aeLockAvailable, 1);
7778 
7779     //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
7780     //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
7781     uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
7782             ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
7783 
7784     staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7785             &awbLockAvailable, 1);
7786 
7787     int32_t max_input_streams = 1;
7788     staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7789                       &max_input_streams,
7790                       1);
7791 
7792     /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
7793     int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
7794             HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
7795             HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
7796             HAL_PIXEL_FORMAT_YCbCr_420_888};
7797     staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7798                       io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
7799 
7800     int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
7801     staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
7802                       &max_latency,
7803                       1);
7804 
7805     int32_t isp_sensitivity_range[2];
7806     isp_sensitivity_range[0] =
7807         gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
7808     isp_sensitivity_range[1] =
7809         gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
7810     staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
7811                       isp_sensitivity_range,
7812                       sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
7813 
7814     uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
7815                                            ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
7816     staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7817             available_hot_pixel_modes,
7818             sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
7819 
7820     uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
7821                                          ANDROID_SHADING_MODE_FAST,
7822                                          ANDROID_SHADING_MODE_HIGH_QUALITY};
7823     staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
7824                       available_shading_modes,
7825                       3);
7826 
7827     uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
7828                                                   ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
7829     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7830                       available_lens_shading_map_modes,
7831                       2);
7832 
7833     uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
7834                                       ANDROID_EDGE_MODE_FAST,
7835                                       ANDROID_EDGE_MODE_HIGH_QUALITY,
7836                                       ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
7837     staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7838             available_edge_modes,
7839             sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
7840 
7841     uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
7842                                            ANDROID_NOISE_REDUCTION_MODE_FAST,
7843                                            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
7844                                            ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
7845                                            ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
7846     staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7847             available_noise_red_modes,
7848             sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
7849 
7850     uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
7851                                          ANDROID_TONEMAP_MODE_FAST,
7852                                          ANDROID_TONEMAP_MODE_HIGH_QUALITY};
7853     staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7854             available_tonemap_modes,
7855             sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
7856 
7857     uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
7858     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7859             available_hot_pixel_map_modes,
7860             sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
7861 
7862     val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
7863             gCamCapability[cameraId]->reference_illuminant1);
7864     if (NAME_NOT_FOUND != val) {
7865         uint8_t fwkReferenceIlluminant = (uint8_t)val;
7866         staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
7867     }
7868 
7869     val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
7870             gCamCapability[cameraId]->reference_illuminant2);
7871     if (NAME_NOT_FOUND != val) {
7872         uint8_t fwkReferenceIlluminant = (uint8_t)val;
7873         staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
7874     }
7875 
7876     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
7877             (void *)gCamCapability[cameraId]->forward_matrix1,
7878             FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
7879 
7880     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
7881             (void *)gCamCapability[cameraId]->forward_matrix2,
7882             FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
7883 
7884     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
7885             (void *)gCamCapability[cameraId]->color_transform1,
7886             COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
7887 
7888     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
7889             (void *)gCamCapability[cameraId]->color_transform2,
7890             COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
7891 
7892     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
7893             (void *)gCamCapability[cameraId]->calibration_transform1,
7894             CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7895 
7896     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
7897             (void *)gCamCapability[cameraId]->calibration_transform2,
7898             CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7899 
7900     int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
7901        ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
7902        ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
7903        ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7904        ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
7905        ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7906        ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
7907        ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
7908        ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
7909        ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
7910        ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
7911        ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
7912        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7913        ANDROID_JPEG_GPS_COORDINATES,
7914        ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
7915        ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
7916        ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
7917        ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7918        ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
7919        ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
7920        ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
7921        ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
7922        ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
7923        ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
7924        ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7925        ANDROID_STATISTICS_FACE_DETECT_MODE,
7926        ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7927        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
7928        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7929        ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
7930        /* DevCamDebug metadata request_keys_basic */
7931        DEVCAMDEBUG_META_ENABLE,
7932        /* DevCamDebug metadata end */
7933        };
7934 
7935     size_t request_keys_cnt =
7936             sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
7937     Vector<int32_t> available_request_keys;
7938     available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
7939     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7940         available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
7941     }
7942 
7943     staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
7944             available_request_keys.array(), available_request_keys.size());
7945 
7946     int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
7947        ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
7948        ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
7949        ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
7950        ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
7951        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7952        ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
7953        ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
7954        ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
7955        ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7956        ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
7957        ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
7958        ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
7959        ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
7960        ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7961        ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7962        ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
7963        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7964        ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
7965        ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7966        ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7967        ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
7968        ANDROID_STATISTICS_FACE_SCORES,
7969        NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
7970        NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
7971        // DevCamDebug metadata result_keys_basic
7972        DEVCAMDEBUG_META_ENABLE,
7973        // DevCamDebug metadata result_keys AF
7974        DEVCAMDEBUG_AF_LENS_POSITION,
7975        DEVCAMDEBUG_AF_TOF_CONFIDENCE,
7976        DEVCAMDEBUG_AF_TOF_DISTANCE,
7977        DEVCAMDEBUG_AF_LUMA,
7978        DEVCAMDEBUG_AF_HAF_STATE,
7979        DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
7980        DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
7981        DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
7982        DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
7983        DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
7984        DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
7985        DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
7986        DEVCAMDEBUG_AF_MONITOR_REFOCUS,
7987        DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
7988        DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
7989        DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
7990        DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
7991        DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
7992        DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
7993        DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
7994        DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
7995        DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
7996        DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
7997        DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
7998        DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
7999        DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
8000        DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
8001        // DevCamDebug metadata result_keys AEC
8002        DEVCAMDEBUG_AEC_TARGET_LUMA,
8003        DEVCAMDEBUG_AEC_COMP_LUMA,
8004        DEVCAMDEBUG_AEC_AVG_LUMA,
8005        DEVCAMDEBUG_AEC_CUR_LUMA,
8006        DEVCAMDEBUG_AEC_LINECOUNT,
8007        DEVCAMDEBUG_AEC_REAL_GAIN,
8008        DEVCAMDEBUG_AEC_EXP_INDEX,
8009        DEVCAMDEBUG_AEC_LUX_IDX,
8010        // DevCamDebug metadata result_keys AWB
8011        DEVCAMDEBUG_AWB_R_GAIN,
8012        DEVCAMDEBUG_AWB_G_GAIN,
8013        DEVCAMDEBUG_AWB_B_GAIN,
8014        DEVCAMDEBUG_AWB_CCT,
8015        DEVCAMDEBUG_AWB_DECISION,
8016        /* DevCamDebug metadata end */
8017        };
8018     size_t result_keys_cnt =
8019             sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
8020 
8021     Vector<int32_t> available_result_keys;
8022     available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
8023     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8024         available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
8025     }
8026     if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
8027         available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
8028         available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
8029     }
8030     if (supportedFaceDetectMode == 1) {
8031         available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
8032         available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
8033     } else if ((supportedFaceDetectMode == 2) ||
8034             (supportedFaceDetectMode == 3)) {
8035         available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
8036         available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
8037     }
8038     if (hasBlackRegions) {
8039         available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
8040         available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
8041     }
8042     staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8043             available_result_keys.array(), available_result_keys.size());
8044 
8045     int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
8046        ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8047        ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
8048        ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
8049        ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8050        ANDROID_SCALER_CROPPING_TYPE,
8051        ANDROID_SYNC_MAX_LATENCY,
8052        ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8053        ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8054        ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8055        ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
8056        ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
8057        ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8058        ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8059        ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8060        ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8061        ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8062        ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8063        ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8064        ANDROID_LENS_FACING,
8065        ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8066        ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8067        ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8068        ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8069        ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8070        ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8071        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8072        /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
8073        ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
8074        ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
8075        ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
8076        ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
8077        ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8078        ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8079        ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8080        ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8081        ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
8082        ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8083        ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8084        ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8085        ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
8086        ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
8087        ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8088        ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8089        ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8090        ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8091        ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8092        ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8093        ANDROID_TONEMAP_MAX_CURVE_POINTS,
8094        ANDROID_CONTROL_AVAILABLE_MODES,
8095        ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8096        ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8097        ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8098        ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8099        ANDROID_SHADING_AVAILABLE_MODES,
8100        ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8101        ANDROID_SENSOR_OPAQUE_RAW_SIZE };
8102 
8103     Vector<int32_t> available_characteristics_keys;
8104     available_characteristics_keys.appendArray(characteristics_keys_basic,
8105             sizeof(characteristics_keys_basic)/sizeof(int32_t));
8106     if (hasBlackRegions) {
8107         available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
8108     }
8109     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
8110                       available_characteristics_keys.array(),
8111                       available_characteristics_keys.size());
8112 
8113     /*available stall durations depend on the hw + sw and will be different for different devices */
8114     /*have to add for raw after implementation*/
8115     int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
8116     size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
8117 
8118     Vector<int64_t> available_stall_durations;
8119     for (uint32_t j = 0; j < stall_formats_count; j++) {
8120         if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
8121             for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8122                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8123                 available_stall_durations.add(stall_formats[j]);
8124                 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8125                 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8126                 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
8127           }
8128         } else {
8129             for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8130                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8131                 available_stall_durations.add(stall_formats[j]);
8132                 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8133                 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8134                 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
8135             }
8136         }
8137     }
8138     staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
8139                       available_stall_durations.array(),
8140                       available_stall_durations.size());
8141 
8142     //QCAMERA3_OPAQUE_RAW
8143     uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8144     cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8145     switch (gCamCapability[cameraId]->opaque_raw_fmt) {
8146     case LEGACY_RAW:
8147         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8148             fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
8149         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8150             fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8151         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8152             fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
8153         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8154         break;
8155     case MIPI_RAW:
8156         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8157             fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
8158         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8159             fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
8160         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8161             fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
8162         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
8163         break;
8164     default:
8165         LOGE("unknown opaque_raw_format %d",
8166                 gCamCapability[cameraId]->opaque_raw_fmt);
8167         break;
8168     }
8169     staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
8170 
8171     Vector<int32_t> strides;
8172     for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8173             gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8174         cam_stream_buf_plane_info_t buf_planes;
8175         strides.add(gCamCapability[cameraId]->raw_dim[i].width);
8176         strides.add(gCamCapability[cameraId]->raw_dim[i].height);
8177         mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8178             &gCamCapability[cameraId]->padding_info, &buf_planes);
8179         strides.add(buf_planes.plane_info.mp[0].stride);
8180     }
8181     staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
8182             strides.size());
8183 
8184     Vector<int32_t> opaque_size;
8185     for (size_t j = 0; j < scalar_formats_count; j++) {
8186         if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
8187             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8188                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8189                 cam_stream_buf_plane_info_t buf_planes;
8190 
8191                 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8192                          &gCamCapability[cameraId]->padding_info, &buf_planes);
8193 
8194                 if (rc == 0) {
8195                     opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
8196                     opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
8197                     opaque_size.add(buf_planes.plane_info.frame_len);
8198                 }else {
8199                     LOGE("raw frame calculation failed!");
8200                 }
8201             }
8202         }
8203     }
8204 
8205     if ((opaque_size.size() > 0) &&
8206             (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
8207         staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
8208     else
8209         LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
8210 
8211     gStaticMetadata[cameraId] = staticInfo.release();
8212     return rc;
8213 }
8214 
8215 /*===========================================================================
8216  * FUNCTION   : makeTable
8217  *
8218  * DESCRIPTION: make a table of sizes
8219  *
8220  * PARAMETERS :
8221  *
8222  *
8223  *==========================================================================*/
makeTable(cam_dimension_t * dimTable,size_t size,size_t max_size,int32_t * sizeTable)8224 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
8225         size_t max_size, int32_t *sizeTable)
8226 {
8227     size_t j = 0;
8228     if (size > max_size) {
8229        size = max_size;
8230     }
8231     for (size_t i = 0; i < size; i++) {
8232         sizeTable[j] = dimTable[i].width;
8233         sizeTable[j+1] = dimTable[i].height;
8234         j+=2;
8235     }
8236 }
8237 
8238 /*===========================================================================
8239  * FUNCTION   : makeFPSTable
8240  *
8241  * DESCRIPTION: make a table of fps ranges
8242  *
8243  * PARAMETERS :
8244  *
8245  *==========================================================================*/
makeFPSTable(cam_fps_range_t * fpsTable,size_t size,size_t max_size,int32_t * fpsRangesTable)8246 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
8247         size_t max_size, int32_t *fpsRangesTable)
8248 {
8249     size_t j = 0;
8250     if (size > max_size) {
8251        size = max_size;
8252     }
8253     for (size_t i = 0; i < size; i++) {
8254         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
8255         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
8256         j+=2;
8257     }
8258 }
8259 
8260 /*===========================================================================
8261  * FUNCTION   : makeOverridesList
8262  *
8263  * DESCRIPTION: make a list of scene mode overrides
8264  *
8265  * PARAMETERS :
8266  *
8267  *
8268  *==========================================================================*/
makeOverridesList(cam_scene_mode_overrides_t * overridesTable,size_t size,size_t max_size,uint8_t * overridesList,uint8_t * supported_indexes,uint32_t camera_id)8269 void QCamera3HardwareInterface::makeOverridesList(
8270         cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
8271         uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
8272 {
8273     /*daemon will give a list of overrides for all scene modes.
8274       However we should send the fwk only the overrides for the scene modes
8275       supported by the framework*/
8276     size_t j = 0;
8277     if (size > max_size) {
8278        size = max_size;
8279     }
8280     size_t focus_count = CAM_FOCUS_MODE_MAX;
8281     focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
8282             focus_count);
8283     for (size_t i = 0; i < size; i++) {
8284         bool supt = false;
8285         size_t index = supported_indexes[i];
8286         overridesList[j] = gCamCapability[camera_id]->flash_available ?
8287                 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
8288         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8289                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8290                 overridesTable[index].awb_mode);
8291         if (NAME_NOT_FOUND != val) {
8292             overridesList[j+1] = (uint8_t)val;
8293         }
8294         uint8_t focus_override = overridesTable[index].af_mode;
8295         for (size_t k = 0; k < focus_count; k++) {
8296            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
8297               supt = true;
8298               break;
8299            }
8300         }
8301         if (supt) {
8302             val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8303                     focus_override);
8304             if (NAME_NOT_FOUND != val) {
8305                 overridesList[j+2] = (uint8_t)val;
8306             }
8307         } else {
8308            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
8309         }
8310         j+=3;
8311     }
8312 }
8313 
8314 /*===========================================================================
8315  * FUNCTION   : filterJpegSizes
8316  *
8317  * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
8318  *              could be downscaled to
8319  *
8320  * PARAMETERS :
8321  *
8322  * RETURN     : length of jpegSizes array
8323  *==========================================================================*/
8324 
filterJpegSizes(int32_t * jpegSizes,int32_t * processedSizes,size_t processedSizesCnt,size_t maxCount,cam_rect_t active_array_size,uint8_t downscale_factor)8325 size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
8326         size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
8327         uint8_t downscale_factor)
8328 {
8329     if (0 == downscale_factor) {
8330         downscale_factor = 1;
8331     }
8332 
8333     int32_t min_width = active_array_size.width / downscale_factor;
8334     int32_t min_height = active_array_size.height / downscale_factor;
8335     size_t jpegSizesCnt = 0;
8336     if (processedSizesCnt > maxCount) {
8337         processedSizesCnt = maxCount;
8338     }
8339     for (size_t i = 0; i < processedSizesCnt; i+=2) {
8340         if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
8341             jpegSizes[jpegSizesCnt] = processedSizes[i];
8342             jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
8343             jpegSizesCnt += 2;
8344         }
8345     }
8346     return jpegSizesCnt;
8347 }
8348 
8349 /*===========================================================================
8350  * FUNCTION   : computeNoiseModelEntryS
8351  *
8352  * DESCRIPTION: function to map a given sensitivity to the S noise
8353  *              model parameters in the DNG noise model.
8354  *
8355  * PARAMETERS : sens : the sensor sensitivity
8356  *
8357  ** RETURN    : S (sensor amplification) noise
8358  *
8359  *==========================================================================*/
computeNoiseModelEntryS(int32_t sens)8360 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
8361     double s = gCamCapability[mCameraId]->gradient_S * sens +
8362             gCamCapability[mCameraId]->offset_S;
8363     return ((s < 0.0) ? 0.0 : s);
8364 }
8365 
8366 /*===========================================================================
8367  * FUNCTION   : computeNoiseModelEntryO
8368  *
8369  * DESCRIPTION: function to map a given sensitivity to the O noise
8370  *              model parameters in the DNG noise model.
8371  *
8372  * PARAMETERS : sens : the sensor sensitivity
8373  *
8374  ** RETURN    : O (sensor readout) noise
8375  *
8376  *==========================================================================*/
computeNoiseModelEntryO(int32_t sens)8377 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
8378     int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
8379     double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
8380             1.0 : (1.0 * sens / max_analog_sens);
8381     double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
8382             gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
8383     return ((o < 0.0) ? 0.0 : o);
8384 }
8385 
8386 /*===========================================================================
8387  * FUNCTION   : getSensorSensitivity
8388  *
8389  * DESCRIPTION: convert iso_mode to an integer value
8390  *
8391  * PARAMETERS : iso_mode : the iso_mode supported by sensor
8392  *
8393  ** RETURN    : sensitivity supported by sensor
8394  *
8395  *==========================================================================*/
getSensorSensitivity(int32_t iso_mode)8396 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
8397 {
8398     int32_t sensitivity;
8399 
8400     switch (iso_mode) {
8401     case CAM_ISO_MODE_100:
8402         sensitivity = 100;
8403         break;
8404     case CAM_ISO_MODE_200:
8405         sensitivity = 200;
8406         break;
8407     case CAM_ISO_MODE_400:
8408         sensitivity = 400;
8409         break;
8410     case CAM_ISO_MODE_800:
8411         sensitivity = 800;
8412         break;
8413     case CAM_ISO_MODE_1600:
8414         sensitivity = 1600;
8415         break;
8416     default:
8417         sensitivity = -1;
8418         break;
8419     }
8420     return sensitivity;
8421 }
8422 
8423 /*===========================================================================
8424  * FUNCTION   : getCamInfo
8425  *
8426  * DESCRIPTION: query camera capabilities
8427  *
8428  * PARAMETERS :
8429  *   @cameraId  : camera Id
8430  *   @info      : camera info struct to be filled in with camera capabilities
8431  *
8432  * RETURN     : int type of status
8433  *              NO_ERROR  -- success
8434  *              none-zero failure code
8435  *==========================================================================*/
getCamInfo(uint32_t cameraId,struct camera_info * info)8436 int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
8437         struct camera_info *info)
8438 {
8439     ATRACE_CALL();
8440     int rc = 0;
8441 
8442     pthread_mutex_lock(&gCamLock);
8443     if (NULL == gCamCapability[cameraId]) {
8444         rc = initCapabilities(cameraId);
8445         if (rc < 0) {
8446             pthread_mutex_unlock(&gCamLock);
8447             return rc;
8448         }
8449     }
8450 
8451     if (NULL == gStaticMetadata[cameraId]) {
8452         rc = initStaticMetadata(cameraId);
8453         if (rc < 0) {
8454             pthread_mutex_unlock(&gCamLock);
8455             return rc;
8456         }
8457     }
8458 
8459     switch(gCamCapability[cameraId]->position) {
8460     case CAM_POSITION_BACK:
8461         info->facing = CAMERA_FACING_BACK;
8462         break;
8463 
8464     case CAM_POSITION_FRONT:
8465         info->facing = CAMERA_FACING_FRONT;
8466         break;
8467 
8468     default:
8469         LOGE("Unknown position type for camera id:%d", cameraId);
8470         rc = -1;
8471         break;
8472     }
8473 
8474 
8475     info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
8476     info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
8477     info->static_camera_characteristics = gStaticMetadata[cameraId];
8478 
8479     //For now assume both cameras can operate independently.
8480     info->conflicting_devices = NULL;
8481     info->conflicting_devices_length = 0;
8482 
8483     //resource cost is 100 * MIN(1.0, m/M),
8484     //where m is throughput requirement with maximum stream configuration
8485     //and M is CPP maximum throughput.
8486     float max_fps = 0.0;
8487     for (uint32_t i = 0;
8488             i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
8489         if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
8490             max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
8491     }
8492     float ratio = 1.0 * MAX_PROCESSED_STREAMS *
8493             gCamCapability[cameraId]->active_array_size.width *
8494             gCamCapability[cameraId]->active_array_size.height * max_fps /
8495             gCamCapability[cameraId]->max_pixel_bandwidth;
8496     info->resource_cost = 100 * MIN(1.0, ratio);
8497     LOGI("camera %d resource cost is %d", cameraId,
8498             info->resource_cost);
8499 
8500     pthread_mutex_unlock(&gCamLock);
8501     return rc;
8502 }
8503 
8504 /*===========================================================================
8505  * FUNCTION   : translateCapabilityToMetadata
8506  *
8507  * DESCRIPTION: translate the capability into camera_metadata_t
8508  *
8509  * PARAMETERS : type of the request
8510  *
8511  *
8512  * RETURN     : success: camera_metadata_t*
8513  *              failure: NULL
8514  *
8515  *==========================================================================*/
translateCapabilityToMetadata(int type)8516 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
8517 {
8518     if (mDefaultMetadata[type] != NULL) {
8519         return mDefaultMetadata[type];
8520     }
8521     //first time we are handling this request
8522     //fill up the metadata structure using the wrapper class
8523     CameraMetadata settings;
8524     //translate from cam_capability_t to camera_metadata_tag_t
8525     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
8526     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
8527     int32_t defaultRequestID = 0;
8528     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
8529 
8530     /* OIS disable */
8531     char ois_prop[PROPERTY_VALUE_MAX];
8532     memset(ois_prop, 0, sizeof(ois_prop));
8533     property_get("persist.camera.ois.disable", ois_prop, "0");
8534     uint8_t ois_disable = (uint8_t)atoi(ois_prop);
8535 
8536     /* Force video to use OIS */
8537     char videoOisProp[PROPERTY_VALUE_MAX];
8538     memset(videoOisProp, 0, sizeof(videoOisProp));
8539     property_get("persist.camera.ois.video", videoOisProp, "1");
8540     uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
8541 
8542     // EIS enable/disable
8543     char eis_prop[PROPERTY_VALUE_MAX];
8544     memset(eis_prop, 0, sizeof(eis_prop));
8545     property_get("persist.camera.eis.enable", eis_prop, "0");
8546     const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
8547 
8548     // Hybrid AE enable/disable
8549     char hybrid_ae_prop[PROPERTY_VALUE_MAX];
8550     memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
8551     property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
8552     const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
8553 
8554     const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
8555     // This is a bit hacky. EIS is enabled only when the above setprop
8556     // is set to non-zero value and on back camera (for 2015 Nexus).
8557     // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
8558     // configureStream is called before this function. In other words,
8559     // we cannot guarantee the app will call configureStream before
8560     // calling createDefaultRequest.
8561     const bool eisEnabled = facingBack && eis_prop_set;
8562 
8563     uint8_t controlIntent = 0;
8564     uint8_t focusMode;
8565     uint8_t vsMode;
8566     uint8_t optStabMode;
8567     uint8_t cacMode;
8568     uint8_t edge_mode;
8569     uint8_t noise_red_mode;
8570     uint8_t tonemap_mode;
8571     bool highQualityModeEntryAvailable = FALSE;
8572     bool fastModeEntryAvailable = FALSE;
8573     vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
8574     optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8575     uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8576 
8577     switch (type) {
8578       case CAMERA3_TEMPLATE_PREVIEW:
8579         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
8580         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8581         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8582         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8583         edge_mode = ANDROID_EDGE_MODE_FAST;
8584         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8585         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8586         break;
8587       case CAMERA3_TEMPLATE_STILL_CAPTURE:
8588         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
8589         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8590         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8591         edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
8592         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
8593         tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
8594         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8595         // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
8596         for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
8597             if (gCamCapability[mCameraId]->aberration_modes[i] ==
8598                     CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
8599                 highQualityModeEntryAvailable = TRUE;
8600             } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
8601                     CAM_COLOR_CORRECTION_ABERRATION_FAST) {
8602                 fastModeEntryAvailable = TRUE;
8603             }
8604         }
8605         if (highQualityModeEntryAvailable) {
8606             cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
8607         } else if (fastModeEntryAvailable) {
8608             cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8609         }
8610         if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
8611             shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
8612         }
8613         break;
8614       case CAMERA3_TEMPLATE_VIDEO_RECORD:
8615         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
8616         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8617         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8618         if (eisEnabled) {
8619             vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
8620         }
8621         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8622         edge_mode = ANDROID_EDGE_MODE_FAST;
8623         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8624         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8625         if (forceVideoOis)
8626             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8627         break;
8628       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
8629         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
8630         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8631         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8632         if (eisEnabled) {
8633             vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
8634         }
8635         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8636         edge_mode = ANDROID_EDGE_MODE_FAST;
8637         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8638         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8639         if (forceVideoOis)
8640             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8641         break;
8642       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
8643         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
8644         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8645         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8646         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8647         edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
8648         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
8649         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8650         break;
8651       case CAMERA3_TEMPLATE_MANUAL:
8652         edge_mode = ANDROID_EDGE_MODE_FAST;
8653         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8654         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8655         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8656         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
8657         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8658         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8659         break;
8660       default:
8661         edge_mode = ANDROID_EDGE_MODE_FAST;
8662         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8663         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8664         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8665         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
8666         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8667         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8668         break;
8669     }
8670     settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
8671     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
8672     settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
8673     if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
8674         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8675     }
8676     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
8677 
8678     if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8679             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
8680         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8681     else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8682             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
8683             || ois_disable)
8684         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8685     settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
8686     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
8687 
8688     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8689             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
8690 
8691     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
8692     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
8693 
8694     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
8695     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
8696 
8697     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
8698     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
8699 
8700     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
8701     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
8702 
8703     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
8704     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
8705 
8706     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
8707     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
8708 
8709     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
8710     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
8711 
8712     /*flash*/
8713     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
8714     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
8715 
8716     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
8717     settings.update(ANDROID_FLASH_FIRING_POWER,
8718             &flashFiringLevel, 1);
8719 
8720     /* lens */
8721     float default_aperture = gCamCapability[mCameraId]->apertures[0];
8722     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
8723 
8724     if (gCamCapability[mCameraId]->filter_densities_count) {
8725         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
8726         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
8727                         gCamCapability[mCameraId]->filter_densities_count);
8728     }
8729 
8730     float default_focal_length = gCamCapability[mCameraId]->focal_length;
8731     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
8732 
8733     if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
8734         float default_focus_distance = 0;
8735         settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
8736     }
8737 
8738     static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
8739     settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
8740 
8741     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8742     settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8743 
8744     static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
8745     settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
8746 
8747     /* face detection (default to OFF) */
8748     static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
8749     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
8750 
8751     static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
8752     settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
8753 
8754     static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
8755     settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
8756 
8757     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8758     settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8759 
8760 
8761     static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8762     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
8763 
8764     /* Exposure time(Update the Min Exposure Time)*/
8765     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
8766     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
8767 
8768     /* frame duration */
8769     static const int64_t default_frame_duration = NSEC_PER_33MSEC;
8770     settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
8771 
8772     /* sensitivity */
8773     static const int32_t default_sensitivity = 100;
8774     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
8775     static const int32_t default_isp_sensitivity =
8776             gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
8777     settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
8778 
8779     /*edge mode*/
8780     settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
8781 
8782     /*noise reduction mode*/
8783     settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
8784 
8785     /*color correction mode*/
8786     static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
8787     settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
8788 
8789     /*transform matrix mode*/
8790     settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
8791 
8792     int32_t scaler_crop_region[4];
8793     scaler_crop_region[0] = 0;
8794     scaler_crop_region[1] = 0;
8795     scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
8796     scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
8797     settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
8798 
8799     static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
8800     settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
8801 
8802     /*focus distance*/
8803     float focus_distance = 0.0;
8804     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
8805 
8806     /*target fps range: use maximum range for picture, and maximum fixed range for video*/
8807     /* Restrict default preview template to max 30 fps */
8808     float max_range = 0.0;
8809     float max_fixed_fps = 0.0;
8810     int32_t fps_range[2] = {0, 0};
8811     for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
8812             i++) {
8813         if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
8814                 TEMPLATE_MAX_PREVIEW_FPS) {
8815             continue;
8816         }
8817         float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
8818             gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8819         if (type == CAMERA3_TEMPLATE_PREVIEW ||
8820                 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
8821                 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
8822             if (range > max_range) {
8823                 fps_range[0] =
8824                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8825                 fps_range[1] =
8826                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8827                 max_range = range;
8828             }
8829         } else {
8830             if (range < 0.01 && max_fixed_fps <
8831                     gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
8832                 fps_range[0] =
8833                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8834                 fps_range[1] =
8835                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8836                 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8837             }
8838         }
8839     }
8840     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
8841 
8842     /*precapture trigger*/
8843     uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
8844     settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
8845 
8846     /*af trigger*/
8847     uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
8848     settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
8849 
8850     /* ae & af regions */
8851     int32_t active_region[] = {
8852             gCamCapability[mCameraId]->active_array_size.left,
8853             gCamCapability[mCameraId]->active_array_size.top,
8854             gCamCapability[mCameraId]->active_array_size.left +
8855                     gCamCapability[mCameraId]->active_array_size.width,
8856             gCamCapability[mCameraId]->active_array_size.top +
8857                     gCamCapability[mCameraId]->active_array_size.height,
8858             0};
8859     settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
8860             sizeof(active_region) / sizeof(active_region[0]));
8861     settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
8862             sizeof(active_region) / sizeof(active_region[0]));
8863 
8864     /* black level lock */
8865     uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8866     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
8867 
8868     //special defaults for manual template
8869     if (type == CAMERA3_TEMPLATE_MANUAL) {
8870         static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
8871         settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
8872 
8873         static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
8874         settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
8875 
8876         static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
8877         settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
8878 
8879         static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
8880         settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
8881 
8882         static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
8883         settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
8884 
8885         static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
8886         settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
8887     }
8888 
8889 
8890     /* TNR
8891      * We'll use this location to determine which modes TNR will be set.
8892      * We will enable TNR to be on if either of the Preview/Video stream requires TNR
8893      * This is not to be confused with linking on a per stream basis that decision
8894      * is still on per-session basis and will be handled as part of config stream
8895      */
8896     uint8_t tnr_enable = 0;
8897 
8898     if (m_bTnrPreview || m_bTnrVideo) {
8899 
8900         switch (type) {
8901             case CAMERA3_TEMPLATE_VIDEO_RECORD:
8902                     tnr_enable = 1;
8903                     break;
8904 
8905             default:
8906                     tnr_enable = 0;
8907                     break;
8908         }
8909 
8910         int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
8911         settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
8912         settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
8913 
8914         LOGD("TNR:%d with process plate %d for template:%d",
8915                              tnr_enable, tnr_process_type, type);
8916     }
8917 
8918     //Update Link tags to default
8919     uint8_t sync_type = CAM_TYPE_STANDALONE;
8920     settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
8921 
8922     uint8_t is_main = 0; //this doesn't matter as app should overwrite
8923     settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
8924 
8925     uint8_t related_camera_id = mCameraId;
8926     settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
8927 
8928     /* CDS default */
8929     char prop[PROPERTY_VALUE_MAX];
8930     memset(prop, 0, sizeof(prop));
8931     property_get("persist.camera.CDS", prop, "Auto");
8932     cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
8933     cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
8934     if (CAM_CDS_MODE_MAX == cds_mode) {
8935         cds_mode = CAM_CDS_MODE_AUTO;
8936     }
8937 
8938     /* Disabling CDS in templates which have TNR enabled*/
8939     if (tnr_enable)
8940         cds_mode = CAM_CDS_MODE_OFF;
8941 
8942     int32_t mode = cds_mode;
8943     settings.update(QCAMERA3_CDS_MODE, &mode, 1);
8944 
8945     /* hybrid ae */
8946     settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
8947 
8948     mDefaultMetadata[type] = settings.release();
8949 
8950     return mDefaultMetadata[type];
8951 }
8952 
8953 /*===========================================================================
8954  * FUNCTION   : setFrameParameters
8955  *
8956  * DESCRIPTION: set parameters per frame as requested in the metadata from
8957  *              framework
8958  *
8959  * PARAMETERS :
8960  *   @request   : request that needs to be serviced
8961  *   @streamsArray : Stream ID of all the requested streams
8962  *   @blob_request: Whether this request is a blob request or not
8963  *
8964  * RETURN     : success: NO_ERROR
8965  *              failure:
8966  *==========================================================================*/
setFrameParameters(camera3_capture_request_t * request,cam_stream_ID_t streamsArray,int blob_request,uint32_t snapshotStreamId)8967 int QCamera3HardwareInterface::setFrameParameters(
8968                     camera3_capture_request_t *request,
8969                     cam_stream_ID_t streamsArray,
8970                     int blob_request,
8971                     uint32_t snapshotStreamId)
8972 {
8973     /*translate from camera_metadata_t type to parm_type_t*/
8974     int rc = 0;
8975     int32_t hal_version = CAM_HAL_V3;
8976 
8977     clear_metadata_buffer(mParameters);
8978     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
8979         LOGE("Failed to set hal version in the parameters");
8980         return BAD_VALUE;
8981     }
8982 
8983     /*we need to update the frame number in the parameters*/
8984     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
8985             request->frame_number)) {
8986         LOGE("Failed to set the frame number in the parameters");
8987         return BAD_VALUE;
8988     }
8989 
8990     /* Update stream id of all the requested buffers */
8991     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
8992         LOGE("Failed to set stream type mask in the parameters");
8993         return BAD_VALUE;
8994     }
8995 
8996     if (mUpdateDebugLevel) {
8997         uint32_t dummyDebugLevel = 0;
8998         /* The value of dummyDebugLevel is irrelavent. On
8999          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
9000         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
9001                 dummyDebugLevel)) {
9002             LOGE("Failed to set UPDATE_DEBUG_LEVEL");
9003             return BAD_VALUE;
9004         }
9005         mUpdateDebugLevel = false;
9006     }
9007 
9008     if(request->settings != NULL){
9009         rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
9010         if (blob_request)
9011             memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
9012     }
9013 
9014     return rc;
9015 }
9016 
9017 /*===========================================================================
9018  * FUNCTION   : setReprocParameters
9019  *
9020  * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
9021  *              return it.
9022  *
9023  * PARAMETERS :
9024  *   @request   : request that needs to be serviced
9025  *
9026  * RETURN     : success: NO_ERROR
9027  *              failure:
9028  *==========================================================================*/
setReprocParameters(camera3_capture_request_t * request,metadata_buffer_t * reprocParam,uint32_t snapshotStreamId)9029 int32_t QCamera3HardwareInterface::setReprocParameters(
9030         camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
9031         uint32_t snapshotStreamId)
9032 {
9033     /*translate from camera_metadata_t type to parm_type_t*/
9034     int rc = 0;
9035 
9036     if (NULL == request->settings){
9037         LOGE("Reprocess settings cannot be NULL");
9038         return BAD_VALUE;
9039     }
9040 
9041     if (NULL == reprocParam) {
9042         LOGE("Invalid reprocessing metadata buffer");
9043         return BAD_VALUE;
9044     }
9045     clear_metadata_buffer(reprocParam);
9046 
9047     /*we need to update the frame number in the parameters*/
9048     if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
9049             request->frame_number)) {
9050         LOGE("Failed to set the frame number in the parameters");
9051         return BAD_VALUE;
9052     }
9053 
9054     rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
9055     if (rc < 0) {
9056         LOGE("Failed to translate reproc request");
9057         return rc;
9058     }
9059 
9060     CameraMetadata frame_settings;
9061     frame_settings = request->settings;
9062     if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
9063             frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
9064         int32_t *crop_count =
9065                 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
9066         int32_t *crop_data =
9067                 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
9068         int32_t *roi_map =
9069                 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
9070         if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
9071             cam_crop_data_t crop_meta;
9072             memset(&crop_meta, 0, sizeof(cam_crop_data_t));
9073             crop_meta.num_of_streams = 1;
9074             crop_meta.crop_info[0].crop.left   = crop_data[0];
9075             crop_meta.crop_info[0].crop.top    = crop_data[1];
9076             crop_meta.crop_info[0].crop.width  = crop_data[2];
9077             crop_meta.crop_info[0].crop.height = crop_data[3];
9078 
9079             crop_meta.crop_info[0].roi_map.left =
9080                     roi_map[0];
9081             crop_meta.crop_info[0].roi_map.top =
9082                     roi_map[1];
9083             crop_meta.crop_info[0].roi_map.width =
9084                     roi_map[2];
9085             crop_meta.crop_info[0].roi_map.height =
9086                     roi_map[3];
9087 
9088             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
9089                 rc = BAD_VALUE;
9090             }
9091             LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
9092                     request->input_buffer->stream,
9093                     crop_meta.crop_info[0].crop.left,
9094                     crop_meta.crop_info[0].crop.top,
9095                     crop_meta.crop_info[0].crop.width,
9096                     crop_meta.crop_info[0].crop.height);
9097             LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
9098                     request->input_buffer->stream,
9099                     crop_meta.crop_info[0].roi_map.left,
9100                     crop_meta.crop_info[0].roi_map.top,
9101                     crop_meta.crop_info[0].roi_map.width,
9102                     crop_meta.crop_info[0].roi_map.height);
9103             } else {
9104                 LOGE("Invalid reprocess crop count %d!", *crop_count);
9105             }
9106     } else {
9107         LOGE("No crop data from matching output stream");
9108     }
9109 
9110     /* These settings are not needed for regular requests so handle them specially for
9111        reprocess requests; information needed for EXIF tags */
9112     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9113         int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9114                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9115         if (NAME_NOT_FOUND != val) {
9116             uint32_t flashMode = (uint32_t)val;
9117             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
9118                 rc = BAD_VALUE;
9119             }
9120         } else {
9121             LOGE("Could not map fwk flash mode %d to correct hal flash mode",
9122                     frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9123         }
9124     } else {
9125         LOGH("No flash mode in reprocess settings");
9126     }
9127 
9128     if (frame_settings.exists(ANDROID_FLASH_STATE)) {
9129         int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
9130         if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
9131             rc = BAD_VALUE;
9132         }
9133     } else {
9134         LOGH("No flash state in reprocess settings");
9135     }
9136 
9137     return rc;
9138 }
9139 
9140 /*===========================================================================
9141  * FUNCTION   : saveRequestSettings
9142  *
9143  * DESCRIPTION: Add any settings that might have changed to the request settings
9144  *              and save the settings to be applied on the frame
9145  *
9146  * PARAMETERS :
9147  *   @jpegMetadata : the extracted and/or modified jpeg metadata
9148  *   @request      : request with initial settings
9149  *
9150  * RETURN     :
9151  * camera_metadata_t* : pointer to the saved request settings
9152  *==========================================================================*/
saveRequestSettings(const CameraMetadata & jpegMetadata,camera3_capture_request_t * request)9153 camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
9154         const CameraMetadata &jpegMetadata,
9155         camera3_capture_request_t *request)
9156 {
9157     camera_metadata_t *resultMetadata;
9158     CameraMetadata camMetadata;
9159     camMetadata = request->settings;
9160 
9161     if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9162         int32_t thumbnail_size[2];
9163         thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9164         thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9165         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
9166                 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
9167     }
9168 
9169     resultMetadata = camMetadata.release();
9170     return resultMetadata;
9171 }
9172 
9173 /*===========================================================================
9174  * FUNCTION   : setHalFpsRange
9175  *
9176  * DESCRIPTION: set FPS range parameter
9177  *
9178  *
9179  * PARAMETERS :
9180  *   @settings    : Metadata from framework
9181  *   @hal_metadata: Metadata buffer
9182  *
9183  *
9184  * RETURN     : success: NO_ERROR
9185  *              failure:
9186  *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    // Convert the framework's AE target FPS range into the HAL fps/HFR
    // parameters, and derive the batch size (mBatchSize) used for
    // constrained high-speed recording. Also flags a sensor restart when the
    // HFR rate changes mid-session.
    //
    // NOTE(review): assumes ANDROID_CONTROL_AE_TARGET_FPS_RANGE is present in
    // @settings — find() on a missing tag would return a NULL data pointer.
    // Confirm callers validate this before invoking.
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Video fps tracks the requested range by default; overridden below for
    // constrained high-speed mode.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // High-speed mode: run the sensor at a fixed rate (min pinned to the
        // requested max), per the table above.
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch size = sensor fps / preview fps, clamped to the
                // maximum the pipeline supports.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Finally push the (possibly adjusted) fps range itself into the batch.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
9280 
9281 /*===========================================================================
9282  * FUNCTION   : translateToHalMetadata
9283  *
9284  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
9285  *
9286  *
9287  * PARAMETERS :
9288  *   @request  : request sent from framework
9289  *
9290  *
9291  * RETURN     : success: NO_ERROR
9292  *              failure:
9293  *==========================================================================*/
translateToHalMetadata(const camera3_capture_request_t * request,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId)9294 int QCamera3HardwareInterface::translateToHalMetadata
9295                                   (const camera3_capture_request_t *request,
9296                                    metadata_buffer_t *hal_metadata,
9297                                    uint32_t snapshotStreamId)
9298 {
9299     int rc = 0;
9300     CameraMetadata frame_settings;
9301     frame_settings = request->settings;
9302 
9303     /* Do not change the order of the following list unless you know what you are
9304      * doing.
9305      * The order is laid out in such a way that parameters in the front of the table
9306      * may be used to override the parameters later in the table. Examples are:
9307      * 1. META_MODE should precede AEC/AWB/AF MODE
9308      * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
9309      * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
9310      * 4. Any mode should precede it's corresponding settings
9311      */
9312     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
9313         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
9314         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
9315             rc = BAD_VALUE;
9316         }
9317         rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
9318         if (rc != NO_ERROR) {
9319             LOGE("extractSceneMode failed");
9320         }
9321     }
9322 
9323     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9324         uint8_t fwk_aeMode =
9325             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9326         uint8_t aeMode;
9327         int32_t redeye;
9328 
9329         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
9330             aeMode = CAM_AE_MODE_OFF;
9331         } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
9332             aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
9333         } else {
9334             aeMode = CAM_AE_MODE_ON;
9335         }
9336         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
9337             redeye = 1;
9338         } else {
9339             redeye = 0;
9340         }
9341 
9342         int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
9343                 fwk_aeMode);
9344         if (NAME_NOT_FOUND != val) {
9345             int32_t flashMode = (int32_t)val;
9346             ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
9347         }
9348 
9349         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
9350         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
9351             rc = BAD_VALUE;
9352         }
9353     }
9354 
    // android.control.awbMode -> HAL white balance mode (table lookup; tag is
    // silently dropped if the framework value has no HAL mapping).
    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                fwk_whiteLevel);
        if (NAME_NOT_FOUND != val) {
            uint8_t whiteLevel = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
                rc = BAD_VALUE;
            }
        }
    }

    // android.colorCorrection.aberrationMode -> HAL CAC mode, downgraded to a
    // mode the device actually advertises when the requested one is absent.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
        uint8_t fwk_cacMode =
                frame_settings.find(
                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
                fwk_cacMode);
        if (NAME_NOT_FOUND != val) {
            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
            bool entryAvailable = FALSE;
            // Check whether Frameworks set CAC mode is supported in device or not
            for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
                if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
                    entryAvailable = TRUE;
                    break;
                }
            }
            LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
            // If entry not found then set the device supported mode instead of frameworks mode i.e,
            // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
            // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
            if (entryAvailable == FALSE) {
                if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
                    cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
                } else {
                    if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
                        // High is not supported and so set the FAST as spec say's underlying
                        // device implementation can be the same for both modes.
                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
                    } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
                        // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
                        // in order to avoid the fps drop due to high quality
                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
                    } else {
                        // Neither requested mode nor a downgrade target is
                        // advertised: fall back to OFF.
                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
                    }
                }
            }
            LOGD("Final cacMode is %d", cacMode);
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
                rc = BAD_VALUE;
            }
        } else {
            LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
        }
    }
9412 
    // android.control.afMode -> HAL focus mode (table lookup).
    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                fwk_focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t focusMode = (uint8_t)val;
            LOGD("set focus mode %d", focusMode);
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Manual focus distance is passed through to the HAL unmodified.
    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
                focalDistance)) {
            rc = BAD_VALUE;
        }
    }

    // Antibanding: a plain AUTO request is specialized to AUTO_50HZ/AUTO_60HZ
    // using the m60HzZone hint before being handed to the HAL.
    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
        uint8_t fwk_antibandingMode =
                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
        int val = lookupHalName(ANTIBANDING_MODES_MAP,
                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
        if (NAME_NOT_FOUND != val) {
            uint32_t hal_antibandingMode = (uint32_t)val;
            if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
                if (m60HzZone) {
                    hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
                } else {
                    hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
                }
            }
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
                    hal_antibandingMode)) {
                rc = BAD_VALUE;
            }
        }
    }
9454 
    // Exposure compensation, clamped to the device-advertised min/max.
    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
        int32_t expCompensation = frame_settings.find(
                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
                expCompensation)) {
            rc = BAD_VALUE;
        }
    }

    // AE lock pass-through.
    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
            rc = BAD_VALUE;
        }
    }
    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
        // NOTE(review): this unconditional assignment can overwrite a
        // BAD_VALUE recorded by an earlier entry with NO_ERROR when
        // setHalFpsRange() succeeds — confirm that is acceptable.
        rc = setHalFpsRange(frame_settings, hal_metadata);
        if (rc != NO_ERROR) {
            LOGE("setHalFpsRange failed");
        }
    }

    // AWB lock pass-through.
    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
            rc = BAD_VALUE;
        }
    }
9487 
    // android.control.effectMode -> HAL effect (table lookup).
    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                fwk_effectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t effectMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Color correction mode pass-through.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
                colorCorrectMode)) {
            rc = BAD_VALUE;
        }
    }

    // Per-channel WB gains; assumes the entry holds at least CC_GAINS_COUNT
    // floats — TODO confirm the framework guarantees the count.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
        cam_color_correct_gains_t colorCorrectGains;
        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
            colorCorrectGains.gains[i] =
                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
                colorCorrectGains)) {
            rc = BAD_VALUE;
        }
    }

    // Color transform matrix, copied rational-by-rational in row-major order.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
        cam_color_correct_matrix_t colorCorrectTransform;
        cam_rational_type_t transform_elem;
        size_t num = 0;
        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
              transform_elem.numerator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
              transform_elem.denominator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
              num++;
           }
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
                colorCorrectTransform)) {
            rc = BAD_VALUE;
        }
    }
9539 
    // AE precapture trigger. The IDLE/-1 defaults are only batched when BOTH
    // the trigger and its id tag are present in the request.
    cam_trigger_t aecTrigger;
    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
    aecTrigger.trigger_id = -1;
    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
        aecTrigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
        aecTrigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
                aecTrigger)) {
            rc = BAD_VALUE;
        }
        LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
                aecTrigger.trigger, aecTrigger.trigger_id);
    }

    /*af_trigger must come with a trigger id*/
    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
        cam_trigger_t af_trigger;
        af_trigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
        af_trigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
            rc = BAD_VALUE;
        }
        LOGD("AfTrigger: %d AfTriggerID: %d",
                af_trigger.trigger, af_trigger.trigger_id);
    }
9571 
    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
        // NOTE(review): the u8 tag value is widened into an int32_t before
        // batching — confirm the HAL side expects a 32-bit value here.
        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
            rc = BAD_VALUE;
        }
    }
    // Edge mode: sharpness strength is zeroed when edge enhancement is OFF,
    // otherwise the device's default sharpness is used.
    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
        cam_edge_application_t edge_application;
        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
            edge_application.sharpness = 0;
        } else {
            edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
            rc = BAD_VALUE;
        }
    }

    // android.flash.mode is honored only when the AE mode does not already
    // control the flash (AUTO_FLASH / ALWAYS_FLASH / AUTO_FLASH_REDEYE).
    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
        int32_t respectFlashMode = 1;
        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
            uint8_t fwk_aeMode =
                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
            if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
                    fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
                    fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
                respectFlashMode = 0;
                LOGH("AE Mode controls flash, ignore android.flash.mode");
            }
        }
        if (respectFlashMode) {
            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
            LOGH("flash mode after mapping %d", val);
            // To check: CAM_INTF_META_FLASH_MODE usage
            if (NAME_NOT_FOUND != val) {
                uint8_t flashMode = (uint8_t)val;
                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
                    rc = BAD_VALUE;
                }
            }
        }
    }
9616 
    // The following clauses are straight pass-throughs: each framework tag is
    // copied verbatim into the HAL batch, with rc = BAD_VALUE on batch failure.
    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
                flashFiringTime)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
                hotPixelMode)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
                lensAperture)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
                filterDensity)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
                focalLength)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
        uint8_t optStabMode =
                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
                optStabMode)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
        uint8_t videoStabMode =
                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
        LOGD("videoStabMode from APP = %d", videoStabMode);
        // NOTE(review): this entry is batched into mParameters while every
        // neighboring entry targets hal_metadata — confirm that is deliberate
        // (e.g. EIS needing to persist on the session parameter batch).
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
                videoStabMode)) {
            rc = BAD_VALUE;
        }
    }
9682 
9683 
    // Noise-reduction mode pass-through.
    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
                noiseRedMode)) {
            rc = BAD_VALUE;
        }
    }

    // Reprocess effective exposure factor pass-through.
    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
        float reprocessEffectiveExposureFactor =
            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
                reprocessEffectiveExposureFactor)) {
            rc = BAD_VALUE;
        }
    }

    // Scaler crop region, translated from active-array to sensor coordinates.
    // scalerCropRegion/scalerCropSet are also consumed further down when
    // clipping the AE/AF ROIs against the crop.
    cam_crop_region_t scalerCropRegion;
    bool scalerCropSet = false;
    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
                scalerCropRegion.width, scalerCropRegion.height);

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
                scalerCropRegion)) {
            rc = BAD_VALUE;
        }
        scalerCropSet = true;
    }
9719 
    // Manual exposure time (ns) pass-through.
    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
        int64_t sensorExpTime =
                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
        LOGD("setting sensorExpTime %lld", sensorExpTime);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
                sensorExpTime)) {
            rc = BAD_VALUE;
        }
    }

    // Frame duration, clamped to [per-request minimum, device maximum].
    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
        int64_t sensorFrameDuration =
                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
        int64_t minFrameDuration = getMinFrameDuration(request);
        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
        LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
                sensorFrameDuration)) {
            rc = BAD_VALUE;
        }
    }

    // Sensor sensitivity (ISO), clamped to the advertised range.
    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
        LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
                sensorSensitivity)) {
            rc = BAD_VALUE;
        }
    }

    // Post-RAW sensitivity boost (ISP digital gain), clamped to its range.
    if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
        int32_t ispSensitivity =
            frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
        if (ispSensitivity <
            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
                ispSensitivity =
                    gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
                LOGD("clamp ispSensitivity to %d", ispSensitivity);
        }
        if (ispSensitivity >
            gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
                ispSensitivity =
                    gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
                LOGD("clamp ispSensitivity to %d", ispSensitivity);
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
                ispSensitivity)) {
            rc = BAD_VALUE;
        }
    }
9777 
    // Lens shading mode pass-through.
    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
            rc = BAD_VALUE;
        }
    }

    // Face detect mode -> HAL mode (table lookup; dropped if unmapped).
    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
        uint8_t fwk_facedetectMode =
                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];

        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
                fwk_facedetectMode);

        if (NAME_NOT_FOUND != val) {
            uint8_t facedetectMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
                    facedetectMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Statistics histogram mode pass-through.
    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
        uint8_t histogramMode =
                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
                histogramMode)) {
            rc = BAD_VALUE;
        }
    }

    // Sharpness-map mode pass-through.
    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
        uint8_t sharpnessMapMode =
                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
                sharpnessMapMode)) {
            rc = BAD_VALUE;
        }
    }

    // Tonemap mode pass-through (the curves themselves are handled below).
    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
        uint8_t tonemapMode =
                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
            rc = BAD_VALUE;
        }
    }
    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
    /*All tonemap channels will have the same number of points*/
    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
        cam_rgb_tonemap_curves tonemapCurves;
        // Each point is an (in, out) float pair, hence count/2.
        // NOTE(review): the point count is taken from the GREEN entry only;
        // per the comment above, BLUE/RED are assumed to match — confirm the
        // framework enforces equal counts.
        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     tonemapCurves.tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }

        /* ch0 = G*/
        size_t point = 0;
        cam_tonemap_curve_t tonemapCurveGreen;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveGreen.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[0] = tonemapCurveGreen;

        /* ch 1 = B */
        point = 0;
        cam_tonemap_curve_t tonemapCurveBlue;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveBlue.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[1] = tonemapCurveBlue;

        /* ch 2 = R */
        point = 0;
        cam_tonemap_curve_t tonemapCurveRed;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveRed.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[2] = tonemapCurveRed;

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
                tonemapCurves)) {
            rc = BAD_VALUE;
        }
    }
9881 
    // Capture intent pass-through.
    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
                captureIntent)) {
            rc = BAD_VALUE;
        }
    }

    // Black-level lock pass-through.
    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
                blackLevelLock)) {
            rc = BAD_VALUE;
        }
    }

    // Lens-shading-map mode pass-through.
    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
        uint8_t lensShadingMapMode =
                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
                lensShadingMapMode)) {
            rc = BAD_VALUE;
        }
    }

    // AE ROI: converted to sensor coordinates and, when a crop region was set
    // above, clipped/reset against it; only batched when still valid.
    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
                roi.rect.height);

        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
            rc = BAD_VALUE;
        }
    }

    // AF ROI: same treatment as the AE ROI above.
    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
                roi.rect.height);

        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
            rc = BAD_VALUE;
        }
    }
9940 
    // CDS for non-HFR non-video mode
    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
            !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
        // Out-of-range vendor CDS values are logged and dropped (not an error).
        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
            LOGE("Invalid CDS mode %d!", *fwk_cds);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
                rc = BAD_VALUE;
            }
        }
    }

    // TNR
    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
        // NOTE(review): b_TnrRequested is written but never read in this
        // clause — confirm it is not needed (dead local otherwise).
        uint8_t b_TnrRequested = 0;
        cam_denoise_param_t tnr;
        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
        tnr.process_plates =
            (cam_denoise_process_type_t)frame_settings.find(
            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
        b_TnrRequested = tnr.denoise_enable;
        // NOTE(review): TNR is batched into mParameters, not hal_metadata
        // like the surrounding entries — confirm intentional.
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
            rc = BAD_VALUE;
        }
    }
9969 
    // Sensor test-pattern mode. For SOLID_COLOR the four per-channel values
    // are remapped from the framework's fixed R/Gr/Gb/B order into the
    // sensor's Bayer color arrangement.
    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
        int32_t fwk_testPatternMode =
                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);

        if (NAME_NOT_FOUND != testPatternMode) {
            cam_test_pattern_data_t testPatternData;
            memset(&testPatternData, 0, sizeof(testPatternData));
            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
                int32_t *fwk_testPatternData =
                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
                // Framework layout: [0]=R, [1]/[2]=greens, [3]=B.
                testPatternData.r = fwk_testPatternData[0];
                testPatternData.b = fwk_testPatternData[3];
                switch (gCamCapability[mCameraId]->color_arrangement) {
                    case CAM_FILTER_ARRANGEMENT_RGGB:
                    case CAM_FILTER_ARRANGEMENT_GRBG:
                        testPatternData.gr = fwk_testPatternData[1];
                        testPatternData.gb = fwk_testPatternData[2];
                        break;
                    case CAM_FILTER_ARRANGEMENT_GBRG:
                    case CAM_FILTER_ARRANGEMENT_BGGR:
                        // Green channels are swapped for these arrangements.
                        testPatternData.gr = fwk_testPatternData[2];
                        testPatternData.gb = fwk_testPatternData[1];
                        break;
                    default:
                        LOGE("color arrangement %d is not supported",
                                gCamCapability[mCameraId]->color_arrangement);
                        break;
                }
            }
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
                    testPatternData)) {
                rc = BAD_VALUE;
            }
        } else {
            LOGE("Invalid framework sensor test pattern mode %d",
                    fwk_testPatternMode);
        }
    }
10012 
    // JPEG EXIF GPS coordinates; the array-add reports back how many
    // elements were copied, and a short copy is treated as an error.
    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
        size_t count = 0;
        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
                gps_coords.data.d, gps_coords.count, count);
        if (gps_coords.count != count) {
            rc = BAD_VALUE;
        }
    }

    // GPS processing method string, truncated into a fixed-size local buffer.
    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
        size_t count = 0;
        // NOTE(review): strlcpy requires a NUL-terminated source; assumes the
        // framework always NUL-terminates this u8 entry — TODO confirm.
        const char *gps_methods_src = (const char *)
                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
        memset(gps_methods, '\0', sizeof(gps_methods));
        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
        if (GPS_PROCESSING_METHOD_SIZE != count) {
            rc = BAD_VALUE;
        }
    }

    // GPS timestamp pass-through.
    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
                gps_timestamp)) {
            rc = BAD_VALUE;
        }
    }
10044 
10045     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
10046         int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
10047         cam_rotation_info_t rotation_info;
10048         if (orientation == 0) {
10049            rotation_info.rotation = ROTATE_0;
10050         } else if (orientation == 90) {
10051            rotation_info.rotation = ROTATE_90;
10052         } else if (orientation == 180) {
10053            rotation_info.rotation = ROTATE_180;
10054         } else if (orientation == 270) {
10055            rotation_info.rotation = ROTATE_270;
10056         }
10057         rotation_info.device_rotation = ROTATE_0;
10058         rotation_info.streamId = snapshotStreamId;
10059         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
10060         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
10061             rc = BAD_VALUE;
10062         }
10063     }
10064 
10065     if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
10066         uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
10067         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
10068             rc = BAD_VALUE;
10069         }
10070     }
10071 
10072     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
10073         uint32_t thumb_quality = (uint32_t)
10074                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
10075         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
10076                 thumb_quality)) {
10077             rc = BAD_VALUE;
10078         }
10079     }
10080 
10081     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
10082         cam_dimension_t dim;
10083         dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
10084         dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
10085         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
10086             rc = BAD_VALUE;
10087         }
10088     }
10089 
10090     // Internal metadata
10091     if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
10092         size_t count = 0;
10093         camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
10094         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
10095                 privatedata.data.i32, privatedata.count, count);
10096         if (privatedata.count != count) {
10097             rc = BAD_VALUE;
10098         }
10099     }
10100 
10101     // EV step
10102     if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
10103             gCamCapability[mCameraId]->exp_compensation_step)) {
10104         rc = BAD_VALUE;
10105     }
10106 
10107     // CDS info
10108     if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
10109         cam_cds_data_t *cdsData = (cam_cds_data_t *)
10110                 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
10111 
10112         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10113                 CAM_INTF_META_CDS_DATA, *cdsData)) {
10114             rc = BAD_VALUE;
10115         }
10116     }
10117 
10118     // Hybrid AE
10119     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
10120         uint8_t *hybrid_ae = (uint8_t *)
10121                 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
10122 
10123         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10124                 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
10125             rc = BAD_VALUE;
10126         }
10127     }
10128 
10129     return rc;
10130 }
10131 
10132 /*===========================================================================
10133  * FUNCTION   : captureResultCb
10134  *
10135  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
10136  *
10137  * PARAMETERS :
10138  *   @frame  : frame information from mm-camera-interface
10139  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
10140  *   @userdata: userdata
10141  *
10142  * RETURN     : NONE
10143  *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer,void * userdata)10144 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
10145                 camera3_stream_buffer_t *buffer,
10146                 uint32_t frame_number, bool isInputBuffer, void *userdata)
10147 {
10148     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
10149     if (hw == NULL) {
10150         LOGE("Invalid hw %p", hw);
10151         return;
10152     }
10153 
10154     hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
10155     return;
10156 }
10157 
10158 /*===========================================================================
10159  * FUNCTION   : setBufferErrorStatus
10160  *
10161  * DESCRIPTION: Callback handler for channels to report any buffer errors
10162  *
10163  * PARAMETERS :
10164  *   @ch     : Channel on which buffer error is reported from
10165  *   @frame_number  : frame number on which buffer error is reported on
10166  *   @buffer_status : buffer error status
10167  *   @userdata: userdata
10168  *
10169  * RETURN     : NONE
10170  *==========================================================================*/
setBufferErrorStatus(QCamera3Channel * ch,uint32_t frame_number,camera3_buffer_status_t err,void * userdata)10171 void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
10172                 uint32_t frame_number, camera3_buffer_status_t err,
10173                 void *userdata)
10174 {
10175     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
10176     if (hw == NULL) {
10177         LOGE("Invalid hw %p", hw);
10178         return;
10179     }
10180 
10181     hw->setBufferErrorStatus(ch, frame_number, err);
10182     return;
10183 }
10184 
setBufferErrorStatus(QCamera3Channel * ch,uint32_t frameNumber,camera3_buffer_status_t err)10185 void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
10186                 uint32_t frameNumber, camera3_buffer_status_t err)
10187 {
10188     LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
10189     pthread_mutex_lock(&mMutex);
10190 
10191     for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
10192         if (req.frame_number != frameNumber)
10193             continue;
10194         for (auto& k : req.mPendingBufferList) {
10195             if(k.stream->priv == ch) {
10196                 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
10197             }
10198         }
10199     }
10200 
10201     pthread_mutex_unlock(&mMutex);
10202     return;
10203 }
10204 /*===========================================================================
10205  * FUNCTION   : initialize
10206  *
10207  * DESCRIPTION: Pass framework callback pointers to HAL
10208  *
10209  * PARAMETERS :
10210  *
10211  *
10212  * RETURN     : Success : 0
10213  *              Failure: -ENODEV
10214  *==========================================================================*/
10215 
initialize(const struct camera3_device * device,const camera3_callback_ops_t * callback_ops)10216 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
10217                                   const camera3_callback_ops_t *callback_ops)
10218 {
10219     LOGD("E");
10220     QCamera3HardwareInterface *hw =
10221         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10222     if (!hw) {
10223         LOGE("NULL camera device");
10224         return -ENODEV;
10225     }
10226 
10227     int rc = hw->initialize(callback_ops);
10228     LOGD("X");
10229     return rc;
10230 }
10231 
10232 /*===========================================================================
10233  * FUNCTION   : configure_streams
10234  *
10235  * DESCRIPTION:
10236  *
10237  * PARAMETERS :
10238  *
10239  *
10240  * RETURN     : Success: 0
10241  *              Failure: -EINVAL (if stream configuration is invalid)
10242  *                       -ENODEV (fatal error)
10243  *==========================================================================*/
10244 
configure_streams(const struct camera3_device * device,camera3_stream_configuration_t * stream_list)10245 int QCamera3HardwareInterface::configure_streams(
10246         const struct camera3_device *device,
10247         camera3_stream_configuration_t *stream_list)
10248 {
10249     LOGD("E");
10250     QCamera3HardwareInterface *hw =
10251         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10252     if (!hw) {
10253         LOGE("NULL camera device");
10254         return -ENODEV;
10255     }
10256     int rc = hw->configureStreams(stream_list);
10257     LOGD("X");
10258     return rc;
10259 }
10260 
10261 /*===========================================================================
10262  * FUNCTION   : construct_default_request_settings
10263  *
10264  * DESCRIPTION: Configure a settings buffer to meet the required use case
10265  *
10266  * PARAMETERS :
10267  *
10268  *
10269  * RETURN     : Success: Return valid metadata
10270  *              Failure: Return NULL
10271  *==========================================================================*/
10272 const camera_metadata_t* QCamera3HardwareInterface::
construct_default_request_settings(const struct camera3_device * device,int type)10273     construct_default_request_settings(const struct camera3_device *device,
10274                                         int type)
10275 {
10276 
10277     LOGD("E");
10278     camera_metadata_t* fwk_metadata = NULL;
10279     QCamera3HardwareInterface *hw =
10280         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10281     if (!hw) {
10282         LOGE("NULL camera device");
10283         return NULL;
10284     }
10285 
10286     fwk_metadata = hw->translateCapabilityToMetadata(type);
10287 
10288     LOGD("X");
10289     return fwk_metadata;
10290 }
10291 
10292 /*===========================================================================
10293  * FUNCTION   : process_capture_request
10294  *
10295  * DESCRIPTION:
10296  *
10297  * PARAMETERS :
10298  *
10299  *
10300  * RETURN     :
10301  *==========================================================================*/
process_capture_request(const struct camera3_device * device,camera3_capture_request_t * request)10302 int QCamera3HardwareInterface::process_capture_request(
10303                     const struct camera3_device *device,
10304                     camera3_capture_request_t *request)
10305 {
10306     LOGD("E");
10307     QCamera3HardwareInterface *hw =
10308         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10309     if (!hw) {
10310         LOGE("NULL camera device");
10311         return -EINVAL;
10312     }
10313 
10314     int rc = hw->processCaptureRequest(request);
10315     LOGD("X");
10316     return rc;
10317 }
10318 
10319 /*===========================================================================
10320  * FUNCTION   : dump
10321  *
10322  * DESCRIPTION:
10323  *
10324  * PARAMETERS :
10325  *
10326  *
10327  * RETURN     :
10328  *==========================================================================*/
10329 
dump(const struct camera3_device * device,int fd)10330 void QCamera3HardwareInterface::dump(
10331                 const struct camera3_device *device, int fd)
10332 {
10333     /* Log level property is read when "adb shell dumpsys media.camera" is
10334        called so that the log level can be controlled without restarting
10335        the media server */
10336     getLogLevel();
10337 
10338     LOGD("E");
10339     QCamera3HardwareInterface *hw =
10340         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10341     if (!hw) {
10342         LOGE("NULL camera device");
10343         return;
10344     }
10345 
10346     hw->dump(fd);
10347     LOGD("X");
10348     return;
10349 }
10350 
10351 /*===========================================================================
10352  * FUNCTION   : flush
10353  *
10354  * DESCRIPTION:
10355  *
10356  * PARAMETERS :
10357  *
10358  *
10359  * RETURN     :
10360  *==========================================================================*/
10361 
flush(const struct camera3_device * device)10362 int QCamera3HardwareInterface::flush(
10363                 const struct camera3_device *device)
10364 {
10365     int rc;
10366     LOGD("E");
10367     QCamera3HardwareInterface *hw =
10368         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10369     if (!hw) {
10370         LOGE("NULL camera device");
10371         return -EINVAL;
10372     }
10373 
10374     pthread_mutex_lock(&hw->mMutex);
10375     // Validate current state
10376     switch (hw->mState) {
10377         case STARTED:
10378             /* valid state */
10379             break;
10380 
10381         case ERROR:
10382             pthread_mutex_unlock(&hw->mMutex);
10383             hw->handleCameraDeviceError();
10384             return -ENODEV;
10385 
10386         default:
10387             LOGI("Flush returned during state %d", hw->mState);
10388             pthread_mutex_unlock(&hw->mMutex);
10389             return 0;
10390     }
10391     pthread_mutex_unlock(&hw->mMutex);
10392 
10393     rc = hw->flush(true /* restart channels */ );
10394     LOGD("X");
10395     return rc;
10396 }
10397 
10398 /*===========================================================================
10399  * FUNCTION   : close_camera_device
10400  *
10401  * DESCRIPTION:
10402  *
10403  * PARAMETERS :
10404  *
10405  *
10406  * RETURN     :
10407  *==========================================================================*/
close_camera_device(struct hw_device_t * device)10408 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
10409 {
10410     int ret = NO_ERROR;
10411     QCamera3HardwareInterface *hw =
10412         reinterpret_cast<QCamera3HardwareInterface *>(
10413             reinterpret_cast<camera3_device_t *>(device)->priv);
10414     if (!hw) {
10415         LOGE("NULL camera device");
10416         return BAD_VALUE;
10417     }
10418 
10419     LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
10420     delete hw;
10421     LOGI("[KPI Perf]: X");
10422     return ret;
10423 }
10424 
10425 /*===========================================================================
10426  * FUNCTION   : getWaveletDenoiseProcessPlate
10427  *
10428  * DESCRIPTION: query wavelet denoise process plate
10429  *
10430  * PARAMETERS : None
10431  *
10432  * RETURN     : WNR process plate value
10433  *==========================================================================*/
getWaveletDenoiseProcessPlate()10434 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
10435 {
10436     char prop[PROPERTY_VALUE_MAX];
10437     memset(prop, 0, sizeof(prop));
10438     property_get("persist.denoise.process.plates", prop, "0");
10439     int processPlate = atoi(prop);
10440     switch(processPlate) {
10441     case 0:
10442         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10443     case 1:
10444         return CAM_WAVELET_DENOISE_CBCR_ONLY;
10445     case 2:
10446         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10447     case 3:
10448         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10449     default:
10450         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10451     }
10452 }
10453 
10454 
10455 /*===========================================================================
10456  * FUNCTION   : getTemporalDenoiseProcessPlate
10457  *
10458  * DESCRIPTION: query temporal denoise process plate
10459  *
10460  * PARAMETERS : None
10461  *
10462  * RETURN     : TNR process plate value
10463  *==========================================================================*/
getTemporalDenoiseProcessPlate()10464 cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
10465 {
10466     char prop[PROPERTY_VALUE_MAX];
10467     memset(prop, 0, sizeof(prop));
10468     property_get("persist.tnr.process.plates", prop, "0");
10469     int processPlate = atoi(prop);
10470     switch(processPlate) {
10471     case 0:
10472         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10473     case 1:
10474         return CAM_WAVELET_DENOISE_CBCR_ONLY;
10475     case 2:
10476         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10477     case 3:
10478         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10479     default:
10480         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10481     }
10482 }
10483 
10484 
10485 /*===========================================================================
10486  * FUNCTION   : extractSceneMode
10487  *
10488  * DESCRIPTION: Extract scene mode from frameworks set metadata
10489  *
10490  * PARAMETERS :
10491  *      @frame_settings: CameraMetadata reference
10492  *      @metaMode: ANDROID_CONTROL_MODE
10493  *      @hal_metadata: hal metadata structure
10494  *
10495  * RETURN     : None
10496  *==========================================================================*/
extractSceneMode(const CameraMetadata & frame_settings,uint8_t metaMode,metadata_buffer_t * hal_metadata)10497 int32_t QCamera3HardwareInterface::extractSceneMode(
10498         const CameraMetadata &frame_settings, uint8_t metaMode,
10499         metadata_buffer_t *hal_metadata)
10500 {
10501     int32_t rc = NO_ERROR;
10502 
10503     if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
10504         camera_metadata_ro_entry entry =
10505                 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
10506         if (0 == entry.count)
10507             return rc;
10508 
10509         uint8_t fwk_sceneMode = entry.data.u8[0];
10510 
10511         int val = lookupHalName(SCENE_MODES_MAP,
10512                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
10513                 fwk_sceneMode);
10514         if (NAME_NOT_FOUND != val) {
10515             uint8_t sceneMode = (uint8_t)val;
10516             LOGD("sceneMode: %d", sceneMode);
10517             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10518                     CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10519                 rc = BAD_VALUE;
10520             }
10521         }
10522     } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
10523             (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
10524         uint8_t sceneMode = CAM_SCENE_MODE_OFF;
10525         LOGD("sceneMode: %d", sceneMode);
10526         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10527                 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10528             rc = BAD_VALUE;
10529         }
10530     }
10531     return rc;
10532 }
10533 
10534 /*===========================================================================
10535  * FUNCTION   : needRotationReprocess
10536  *
10537  * DESCRIPTION: if rotation needs to be done by reprocess in pp
10538  *
10539  * PARAMETERS : none
10540  *
10541  * RETURN     : true: needed
10542  *              false: no need
10543  *==========================================================================*/
needRotationReprocess()10544 bool QCamera3HardwareInterface::needRotationReprocess()
10545 {
10546     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
10547         // current rotation is not zero, and pp has the capability to process rotation
10548         LOGH("need do reprocess for rotation");
10549         return true;
10550     }
10551 
10552     return false;
10553 }
10554 
10555 /*===========================================================================
10556  * FUNCTION   : needReprocess
10557  *
10558  * DESCRIPTION: if reprocess is needed
10559  *
10560  * PARAMETERS : none
10561  *
10562  * RETURN     : true: needed
10563  *              false: no need
10564  *==========================================================================*/
needReprocess(cam_feature_mask_t postprocess_mask)10565 bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
10566 {
10567     if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
10568         // TODO: add for ZSL HDR later
10569         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
10570         if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
10571             LOGH("need do reprocess for ZSL WNR or min PP reprocess");
10572             return true;
10573         } else {
10574             LOGH("already post processed frame");
10575             return false;
10576         }
10577     }
10578     return needRotationReprocess();
10579 }
10580 
10581 /*===========================================================================
10582  * FUNCTION   : needJpegExifRotation
10583  *
10584  * DESCRIPTION: if rotation from jpeg is needed
10585  *
10586  * PARAMETERS : none
10587  *
10588  * RETURN     : true: needed
10589  *              false: no need
10590  *==========================================================================*/
needJpegExifRotation()10591 bool QCamera3HardwareInterface::needJpegExifRotation()
10592 {
10593    /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
10594     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
10595        LOGD("Need use Jpeg EXIF Rotation");
10596        return true;
10597     }
10598     return false;
10599 }
10600 
10601 /*===========================================================================
10602  * FUNCTION   : addOfflineReprocChannel
10603  *
10604  * DESCRIPTION: add a reprocess channel that will do reprocess on frames
10605  *              coming from input channel
10606  *
10607  * PARAMETERS :
10608  *   @config  : reprocess configuration
10609  *   @inputChHandle : pointer to the input (source) channel
10610  *
10611  *
10612  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
10613  *==========================================================================*/
addOfflineReprocChannel(const reprocess_config_t & config,QCamera3ProcessingChannel * inputChHandle)10614 QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
10615         const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
10616 {
10617     int32_t rc = NO_ERROR;
10618     QCamera3ReprocessChannel *pChannel = NULL;
10619 
10620     pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
10621             mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
10622             config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
10623     if (NULL == pChannel) {
10624         LOGE("no mem for reprocess channel");
10625         return NULL;
10626     }
10627 
10628     rc = pChannel->initialize(IS_TYPE_NONE);
10629     if (rc != NO_ERROR) {
10630         LOGE("init reprocess channel failed, ret = %d", rc);
10631         delete pChannel;
10632         return NULL;
10633     }
10634 
10635     // pp feature config
10636     cam_pp_feature_config_t pp_config;
10637     memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
10638 
10639     pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
10640     if (gCamCapability[mCameraId]->qcom_supported_feature_mask
10641             & CAM_QCOM_FEATURE_DSDN) {
10642         //Use CPP CDS incase h/w supports it.
10643         pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
10644         pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
10645     }
10646     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
10647         pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
10648     }
10649 
10650     rc = pChannel->addReprocStreamsFromSource(pp_config,
10651             config,
10652             IS_TYPE_NONE,
10653             mMetadataChannel);
10654 
10655     if (rc != NO_ERROR) {
10656         delete pChannel;
10657         return NULL;
10658     }
10659     return pChannel;
10660 }
10661 
10662 /*===========================================================================
10663  * FUNCTION   : getMobicatMask
10664  *
10665  * DESCRIPTION: returns mobicat mask
10666  *
10667  * PARAMETERS : none
10668  *
10669  * RETURN     : mobicat mask
10670  *
10671  *==========================================================================*/
getMobicatMask()10672 uint8_t QCamera3HardwareInterface::getMobicatMask()
10673 {
10674     return m_MobicatMask;
10675 }
10676 
10677 /*===========================================================================
10678  * FUNCTION   : setMobicat
10679  *
10680  * DESCRIPTION: set Mobicat on/off.
10681  *
10682  * PARAMETERS :
10683  *   @params  : none
10684  *
10685  * RETURN     : int32_t type of status
10686  *              NO_ERROR  -- success
10687  *              none-zero failure code
10688  *==========================================================================*/
setMobicat()10689 int32_t QCamera3HardwareInterface::setMobicat()
10690 {
10691     char value [PROPERTY_VALUE_MAX];
10692     property_get("persist.camera.mobicat", value, "0");
10693     int32_t ret = NO_ERROR;
10694     uint8_t enableMobi = (uint8_t)atoi(value);
10695 
10696     if (enableMobi) {
10697         tune_cmd_t tune_cmd;
10698         tune_cmd.type = SET_RELOAD_CHROMATIX;
10699         tune_cmd.module = MODULE_ALL;
10700         tune_cmd.value = TRUE;
10701         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10702                 CAM_INTF_PARM_SET_VFE_COMMAND,
10703                 tune_cmd);
10704 
10705         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10706                 CAM_INTF_PARM_SET_PP_COMMAND,
10707                 tune_cmd);
10708     }
10709     m_MobicatMask = enableMobi;
10710 
10711     return ret;
10712 }
10713 
10714 /*===========================================================================
10715 * FUNCTION   : getLogLevel
10716 *
10717 * DESCRIPTION: Reads the log level property into a variable
10718 *
10719 * PARAMETERS :
10720 *   None
10721 *
10722 * RETURN     :
10723 *   None
10724 *==========================================================================*/
getLogLevel()10725 void QCamera3HardwareInterface::getLogLevel()
10726 {
10727     char prop[PROPERTY_VALUE_MAX];
10728     uint32_t globalLogLevel = 0;
10729 
10730     property_get("persist.camera.hal.debug", prop, "0");
10731     int val = atoi(prop);
10732     if (0 <= val) {
10733         gCamHal3LogLevel = (uint32_t)val;
10734     }
10735 
10736     property_get("persist.camera.kpi.debug", prop, "1");
10737     gKpiDebugLevel = atoi(prop);
10738 
10739     property_get("persist.camera.global.debug", prop, "0");
10740     val = atoi(prop);
10741     if (0 <= val) {
10742         globalLogLevel = (uint32_t)val;
10743     }
10744 
10745     /* Highest log level among hal.logs and global.logs is selected */
10746     if (gCamHal3LogLevel < globalLogLevel)
10747         gCamHal3LogLevel = globalLogLevel;
10748 
10749     return;
10750 }
10751 
10752 /*===========================================================================
10753  * FUNCTION   : validateStreamRotations
10754  *
10755  * DESCRIPTION: Check if the rotations requested are supported
10756  *
10757  * PARAMETERS :
10758  *   @stream_list : streams to be configured
10759  *
10760  * RETURN     : NO_ERROR on success
10761  *              -EINVAL on failure
10762  *
10763  *==========================================================================*/
validateStreamRotations(camera3_stream_configuration_t * streamList)10764 int QCamera3HardwareInterface::validateStreamRotations(
10765         camera3_stream_configuration_t *streamList)
10766 {
10767     int rc = NO_ERROR;
10768 
10769     /*
10770     * Loop through all streams requested in configuration
10771     * Check if unsupported rotations have been requested on any of them
10772     */
10773     for (size_t j = 0; j < streamList->num_streams; j++){
10774         camera3_stream_t *newStream = streamList->streams[j];
10775 
10776         switch(newStream->rotation) {
10777             case CAMERA3_STREAM_ROTATION_0:
10778             case CAMERA3_STREAM_ROTATION_90:
10779             case CAMERA3_STREAM_ROTATION_180:
10780             case CAMERA3_STREAM_ROTATION_270:
10781                 //Expected values
10782                 break;
10783             default:
10784                 LOGE("Error: Unsupported rotation of %d requested for stream"
10785                         "type:%d and stream format:%d",
10786                         newStream->rotation, newStream->stream_type,
10787                         newStream->format);
10788                 return -EINVAL;
10789         }
10790 
10791         bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
10792         bool isImplDef = (newStream->format ==
10793                 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
10794         bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
10795                 isImplDef);
10796 
10797         if (isRotated && (!isImplDef || isZsl)) {
10798             LOGE("Error: Unsupported rotation of %d requested for stream"
10799                     "type:%d and stream format:%d",
10800                     newStream->rotation, newStream->stream_type,
10801                     newStream->format);
10802             rc = -EINVAL;
10803             break;
10804         }
10805     }
10806 
10807     return rc;
10808 }
10809 
10810 /*===========================================================================
10811 * FUNCTION   : getFlashInfo
10812 *
10813 * DESCRIPTION: Retrieve information about whether the device has a flash.
10814 *
10815 * PARAMETERS :
10816 *   @cameraId  : Camera id to query
10817 *   @hasFlash  : Boolean indicating whether there is a flash device
10818 *                associated with given camera
10819 *   @flashNode : If a flash device exists, this will be its device node.
10820 *
10821 * RETURN     :
10822 *   None
10823 *==========================================================================*/
getFlashInfo(const int cameraId,bool & hasFlash,char (& flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])10824 void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
10825         bool& hasFlash,
10826         char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
10827 {
10828     cam_capability_t* camCapability = gCamCapability[cameraId];
10829     if (NULL == camCapability) {
10830         hasFlash = false;
10831         flashNode[0] = '\0';
10832     } else {
10833         hasFlash = camCapability->flash_available;
10834         strlcpy(flashNode,
10835                 (char*)camCapability->flash_dev_name,
10836                 QCAMERA_MAX_FILEPATH_LENGTH);
10837     }
10838 }
10839 
10840 /*===========================================================================
10841 * FUNCTION   : getEepromVersionInfo
10842 *
10843 * DESCRIPTION: Retrieve version info of the sensor EEPROM data
10844 *
10845 * PARAMETERS : None
10846 *
10847 * RETURN     : string describing EEPROM version
10848 *              "\0" if no such info available
10849 *==========================================================================*/
getEepromVersionInfo()10850 const char *QCamera3HardwareInterface::getEepromVersionInfo()
10851 {
10852     return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
10853 }
10854 
10855 /*===========================================================================
10856 * FUNCTION   : getLdafCalib
10857 *
10858 * DESCRIPTION: Retrieve Laser AF calibration data
10859 *
10860 * PARAMETERS : None
10861 *
10862 * RETURN     : Two uint32_t describing laser AF calibration data
10863 *              NULL if none is available.
10864 *==========================================================================*/
getLdafCalib()10865 const uint32_t *QCamera3HardwareInterface::getLdafCalib()
10866 {
10867     if (mLdafCalibExist) {
10868         return &mLdafCalib[0];
10869     } else {
10870         return NULL;
10871     }
10872 }
10873 
10874 /*===========================================================================
10875  * FUNCTION   : dynamicUpdateMetaStreamInfo
10876  *
10877  * DESCRIPTION: This function:
10878  *             (1) stops all the channels
10879  *             (2) returns error on pending requests and buffers
10880  *             (3) sends metastream_info in setparams
10881  *             (4) starts all channels
10882  *             This is useful when sensor has to be restarted to apply any
10883  *             settings such as frame rate from a different sensor mode
10884  *
10885  * PARAMETERS : None
10886  *
10887  * RETURN     : NO_ERROR on success
10888  *              Error codes on failure
10889  *
10890  *==========================================================================*/
dynamicUpdateMetaStreamInfo()10891 int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
10892 {
10893     ATRACE_CALL();
10894     int rc = NO_ERROR;
10895 
10896     LOGD("E");
10897 
10898     rc = stopAllChannels();
10899     if (rc < 0) {
10900         LOGE("stopAllChannels failed");
10901         return rc;
10902     }
10903 
10904     rc = notifyErrorForPendingRequests();
10905     if (rc < 0) {
10906         LOGE("notifyErrorForPendingRequests failed");
10907         return rc;
10908     }
10909 
10910     for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
10911         LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
10912                 "Format:%d",
10913                 mStreamConfigInfo.type[i],
10914                 mStreamConfigInfo.stream_sizes[i].width,
10915                 mStreamConfigInfo.stream_sizes[i].height,
10916                 mStreamConfigInfo.postprocess_mask[i],
10917                 mStreamConfigInfo.format[i]);
10918     }
10919 
10920     /* Send meta stream info once again so that ISP can start */
10921     ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10922             CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
10923     rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
10924             mParameters);
10925     if (rc < 0) {
10926         LOGE("set Metastreaminfo failed. Sensor mode does not change");
10927     }
10928 
10929     rc = startAllChannels();
10930     if (rc < 0) {
10931         LOGE("startAllChannels failed");
10932         return rc;
10933     }
10934 
10935     LOGD("X");
10936     return rc;
10937 }
10938 
10939 /*===========================================================================
10940  * FUNCTION   : stopAllChannels
10941  *
10942  * DESCRIPTION: This function stops (equivalent to stream-off) all channels
10943  *
10944  * PARAMETERS : None
10945  *
10946  * RETURN     : NO_ERROR on success
10947  *              Error codes on failure
10948  *
10949  *==========================================================================*/
stopAllChannels()10950 int32_t QCamera3HardwareInterface::stopAllChannels()
10951 {
10952     int32_t rc = NO_ERROR;
10953 
10954     LOGD("Stopping all channels");
10955     // Stop the Streams/Channels
10956     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10957         it != mStreamInfo.end(); it++) {
10958         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10959         if (channel) {
10960             channel->stop();
10961         }
10962         (*it)->status = INVALID;
10963     }
10964 
10965     if (mSupportChannel) {
10966         mSupportChannel->stop();
10967     }
10968     if (mAnalysisChannel) {
10969         mAnalysisChannel->stop();
10970     }
10971     if (mRawDumpChannel) {
10972         mRawDumpChannel->stop();
10973     }
10974     if (mMetadataChannel) {
10975         /* If content of mStreamInfo is not 0, there is metadata stream */
10976         mMetadataChannel->stop();
10977     }
10978 
10979     LOGD("All channels stopped");
10980     return rc;
10981 }
10982 
10983 /*===========================================================================
10984  * FUNCTION   : startAllChannels
10985  *
10986  * DESCRIPTION: This function starts (equivalent to stream-on) all channels
10987  *
10988  * PARAMETERS : None
10989  *
10990  * RETURN     : NO_ERROR on success
10991  *              Error codes on failure
10992  *
10993  *==========================================================================*/
startAllChannels()10994 int32_t QCamera3HardwareInterface::startAllChannels()
10995 {
10996     int32_t rc = NO_ERROR;
10997 
10998     LOGD("Start all channels ");
10999     // Start the Streams/Channels
11000     if (mMetadataChannel) {
11001         /* If content of mStreamInfo is not 0, there is metadata stream */
11002         rc = mMetadataChannel->start();
11003         if (rc < 0) {
11004             LOGE("META channel start failed");
11005             return rc;
11006         }
11007     }
11008     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
11009         it != mStreamInfo.end(); it++) {
11010         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
11011         if (channel) {
11012             rc = channel->start();
11013             if (rc < 0) {
11014                 LOGE("channel start failed");
11015                 return rc;
11016             }
11017         }
11018     }
11019     if (mAnalysisChannel) {
11020         mAnalysisChannel->start();
11021     }
11022     if (mSupportChannel) {
11023         rc = mSupportChannel->start();
11024         if (rc < 0) {
11025             LOGE("Support channel start failed");
11026             return rc;
11027         }
11028     }
11029     if (mRawDumpChannel) {
11030         rc = mRawDumpChannel->start();
11031         if (rc < 0) {
11032             LOGE("RAW dump channel start failed");
11033             return rc;
11034         }
11035     }
11036 
11037     LOGD("All channels started");
11038     return rc;
11039 }
11040 
11041 /*===========================================================================
11042  * FUNCTION   : notifyErrorForPendingRequests
11043  *
11044  * DESCRIPTION: This function sends error for all the pending requests/buffers
11045  *
11046  * PARAMETERS : None
11047  *
11048  * RETURN     : Error codes
11049  *              NO_ERROR on success
11050  *
11051  *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Find the oldest pending request; buffers older than it have already
    // had their metadata delivered and only need ERROR_BUFFER, while
    // requests at or beyond it get a full ERROR_REQUEST.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
       frameNum);

    // Walk the per-request buffer tracker; entries are erased inside the
    // loop, so the iterator is advanced only via erase().
    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            // NOTE(review): %d is used for size_t here — should be %zu.
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                req->frame_number, req->mPendingBufferList.size());

            // NOTE(review): plain new[] throws on failure, so this NULL
            // check is dead code unless a non-throwing operator new is
            // in effect — confirm the build's allocator behavior.
            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            // One capture result carrying every buffer of this request,
            // all flagged as errors; no metadata is attached.
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {

                // Per-buffer ERROR_BUFFER notify precedes the capture
                // result, as required by the camera3 callback contract.
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            LOGE("Sending ERROR REQUEST for all pending requests");
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

            LOGE("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            // NOTE(review): same dead NULL-after-new[] pattern as above.
            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            // NOTE(review): input_buffer is taken from the head of
            // mPendingRequestsList, which is assumed to correspond to
            // this tracker entry — confirm the two lists stay in step.
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);
            delete [] pStream_Buf;
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    // Defensive sweep: clear any tracker state the loop above left behind.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
11190 
isOnEncoder(const cam_dimension_t max_viewfinder_size,uint32_t width,uint32_t height)11191 bool QCamera3HardwareInterface::isOnEncoder(
11192         const cam_dimension_t max_viewfinder_size,
11193         uint32_t width, uint32_t height)
11194 {
11195     return (width > (uint32_t)max_viewfinder_size.width ||
11196             height > (uint32_t)max_viewfinder_size.height);
11197 }
11198 
11199 /*===========================================================================
11200  * FUNCTION   : setBundleInfo
11201  *
11202  * DESCRIPTION: Set bundle info for all streams that are bundle.
11203  *
11204  * PARAMETERS : None
11205  *
11206  * RETURN     : NO_ERROR on success
11207  *              Error codes on failure
11208  *==========================================================================*/
setBundleInfo()11209 int32_t QCamera3HardwareInterface::setBundleInfo()
11210 {
11211     int32_t rc = NO_ERROR;
11212 
11213     if (mChannelHandle) {
11214         cam_bundle_config_t bundleInfo;
11215         memset(&bundleInfo, 0, sizeof(bundleInfo));
11216         rc = mCameraHandle->ops->get_bundle_info(
11217                 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
11218         if (rc != NO_ERROR) {
11219             LOGE("get_bundle_info failed");
11220             return rc;
11221         }
11222         if (mAnalysisChannel) {
11223             mAnalysisChannel->setBundleInfo(bundleInfo);
11224         }
11225         if (mSupportChannel) {
11226             mSupportChannel->setBundleInfo(bundleInfo);
11227         }
11228         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
11229                 it != mStreamInfo.end(); it++) {
11230             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
11231             channel->setBundleInfo(bundleInfo);
11232         }
11233         if (mRawDumpChannel) {
11234             mRawDumpChannel->setBundleInfo(bundleInfo);
11235         }
11236     }
11237 
11238     return rc;
11239 }
11240 
11241 /*===========================================================================
11242  * FUNCTION   : get_num_overall_buffers
11243  *
11244  * DESCRIPTION: Estimate number of pending buffers across all requests.
11245  *
11246  * PARAMETERS : None
11247  *
11248  * RETURN     : Number of overall pending buffers
11249  *
11250  *==========================================================================*/
get_num_overall_buffers()11251 uint32_t PendingBuffersMap::get_num_overall_buffers()
11252 {
11253     uint32_t sum_buffers = 0;
11254     for (auto &req : mPendingBuffersInRequest) {
11255         sum_buffers += req.mPendingBufferList.size();
11256     }
11257     return sum_buffers;
11258 }
11259 
11260 /*===========================================================================
11261  * FUNCTION   : removeBuf
11262  *
11263  * DESCRIPTION: Remove a matching buffer from tracker.
11264  *
11265  * PARAMETERS : @buffer: image buffer for the callback
11266  *
11267  * RETURN     : None
11268  *
11269  *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    // Find the first tracked entry whose handle matches and remove it;
    // a request whose buffer list becomes empty is dropped entirely.
    bool buffer_found = false;
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                // Both erase() calls below are immediately followed by a
                // break out of the loops, so the returned iterators are
                // never dereferenced or incremented afterwards.
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        if (buffer_found) {
            break;
        }
    }
    // NOTE(review): %d used for a uint32_t count — harmless here but %u
    // would be the matching specifier.
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
11296 
11297 /*===========================================================================
11298  * FUNCTION   : getBufErrStatus
11299  *
11300  * DESCRIPTION: get buffer error status
11301  *
11302  * PARAMETERS : @buffer: buffer handle
11303  *
11304  * RETURN     : None
11305  *
11306  *==========================================================================*/
getBufErrStatus(buffer_handle_t * buffer)11307 int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
11308 {
11309     for (auto& req : mPendingBuffersInRequest) {
11310         for (auto& k : req.mPendingBufferList) {
11311             if (k.buffer == buffer)
11312                 return k.bufStatus;
11313         }
11314     }
11315     return CAMERA3_BUFFER_STATUS_OK;
11316 }
11317 
11318 /*===========================================================================
11319  * FUNCTION   : setPAAFSupport
11320  *
11321  * DESCRIPTION: Set the preview-assisted auto focus support bit in
11322  *              feature mask according to stream type and filter
11323  *              arrangement
11324  *
11325  * PARAMETERS : @feature_mask: current feature mask, which may be modified
11326  *              @stream_type: stream type
11327  *              @filter_arrangement: filter arrangement
11328  *
11329  * RETURN     : None
11330  *==========================================================================*/
setPAAFSupport(cam_feature_mask_t & feature_mask,cam_stream_type_t stream_type,cam_color_filter_arrangement_t filter_arrangement)11331 void QCamera3HardwareInterface::setPAAFSupport(
11332         cam_feature_mask_t& feature_mask,
11333         cam_stream_type_t stream_type,
11334         cam_color_filter_arrangement_t filter_arrangement)
11335 {
11336     switch (filter_arrangement) {
11337     case CAM_FILTER_ARRANGEMENT_RGGB:
11338     case CAM_FILTER_ARRANGEMENT_GRBG:
11339     case CAM_FILTER_ARRANGEMENT_GBRG:
11340     case CAM_FILTER_ARRANGEMENT_BGGR:
11341         if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
11342                 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
11343             feature_mask |= CAM_QCOM_FEATURE_PAAF;
11344         }
11345         break;
11346     case CAM_FILTER_ARRANGEMENT_Y:
11347         if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
11348             feature_mask |= CAM_QCOM_FEATURE_PAAF;
11349         }
11350         break;
11351     default:
11352         break;
11353     }
11354     LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
11355             feature_mask, stream_type, filter_arrangement);
11356 
11357 
11358 }
11359 
11360 /*===========================================================================
11361  * FUNCTION   : adjustBlackLevelForCFA
11362  *
11363  * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
11364  *              of bayer CFA (Color Filter Array).
11365  *
11366  * PARAMETERS : @input: black level pattern in the order of RGGB
11367  *              @output: black level pattern in the order of CFA
11368  *              @color_arrangement: CFA color arrangement
11369  *
11370  * RETURN     : None
11371  *==========================================================================*/
11372 template<typename T>
adjustBlackLevelForCFA(T input[BLACK_LEVEL_PATTERN_CNT],T output[BLACK_LEVEL_PATTERN_CNT],cam_color_filter_arrangement_t color_arrangement)11373 void QCamera3HardwareInterface::adjustBlackLevelForCFA(
11374         T input[BLACK_LEVEL_PATTERN_CNT],
11375         T output[BLACK_LEVEL_PATTERN_CNT],
11376         cam_color_filter_arrangement_t color_arrangement)
11377 {
11378     switch (color_arrangement) {
11379     case CAM_FILTER_ARRANGEMENT_GRBG:
11380         output[0] = input[1];
11381         output[1] = input[0];
11382         output[2] = input[3];
11383         output[3] = input[2];
11384         break;
11385     case CAM_FILTER_ARRANGEMENT_GBRG:
11386         output[0] = input[2];
11387         output[1] = input[3];
11388         output[2] = input[0];
11389         output[3] = input[1];
11390         break;
11391     case CAM_FILTER_ARRANGEMENT_BGGR:
11392         output[0] = input[3];
11393         output[1] = input[2];
11394         output[2] = input[1];
11395         output[3] = input[0];
11396         break;
11397     case CAM_FILTER_ARRANGEMENT_RGGB:
11398         output[0] = input[0];
11399         output[1] = input[1];
11400         output[2] = input[2];
11401         output[3] = input[3];
11402         break;
11403     default:
11404         LOGE("Invalid color arrangement to derive dynamic blacklevel");
11405         break;
11406     }
11407 }
11408 
11409 /*===========================================================================
11410  * FUNCTION   : is60HzZone
11411  *
11412  * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
11413  *
11414  * PARAMETERS : None
11415  *
11416  * RETURN     : True if in 60Hz zone, False otherwise
11417  *==========================================================================*/
is60HzZone()11418 bool QCamera3HardwareInterface::is60HzZone()
11419 {
11420     time_t t = time(NULL);
11421     struct tm lt;
11422 
11423     struct tm* r = localtime_r(&t, &lt);
11424 
11425     if (r == NULL || lt.tm_gmtoff <=  -2*60*60 || lt.tm_gmtoff >= 8*60*60)
11426         return true;
11427     else
11428         return false;
11429 }
11430 }; //end namespace qcamera
11431