• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* Copyright (c) 2012-2015, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 *     * Redistributions of source code must retain the above copyright
7 *       notice, this list of conditions and the following disclaimer.
8 *     * Redistributions in binary form must reproduce the above
9 *       copyright notice, this list of conditions and the following
10 *       disclaimer in the documentation and/or other materials provided
11 *       with the distribution.
12 *     * Neither the name of The Linux Foundation nor the names of its
13 *       contributors may be used to endorse or promote products derived
14 *       from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29 
30 #define ATRACE_TAG ATRACE_TAG_CAMERA
31 #define LOG_TAG "QCamera3HWI"
32 //#define LOG_NDEBUG 0
33 
34 #define __STDC_LIMIT_MACROS
35 #include <cutils/properties.h>
36 #include <hardware/camera3.h>
37 #include <camera/CameraMetadata.h>
38 #include <stdio.h>
39 #include <stdlib.h>
40 #include <fcntl.h>
41 #include <stdint.h>
42 #include <utils/Log.h>
43 #include <utils/Errors.h>
44 #include <utils/Trace.h>
45 #include <sync/sync.h>
46 #include <gralloc_priv.h>
47 #include "util/QCameraFlash.h"
48 #include "QCamera3HWI.h"
49 #include "QCamera3Mem.h"
50 #include "QCamera3Channel.h"
51 #include "QCamera3PostProc.h"
52 #include "QCamera3VendorTags.h"
53 
54 using namespace android;
55 
56 namespace qcamera {
57 
58 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
59 
60 #define EMPTY_PIPELINE_DELAY 2
61 #define PARTIAL_RESULT_COUNT 2
62 #define FRAME_SKIP_DELAY     0
63 #define CAM_MAX_SYNC_LATENCY 4
64 
65 #define MAX_VALUE_8BIT ((1<<8)-1)
66 #define MAX_VALUE_10BIT ((1<<10)-1)
67 #define MAX_VALUE_12BIT ((1<<12)-1)
68 
69 #define VIDEO_4K_WIDTH  3840
70 #define VIDEO_4K_HEIGHT 2160
71 
72 #define MAX_EIS_WIDTH 1920
73 #define MAX_EIS_HEIGHT 1080
74 
75 #define MAX_RAW_STREAMS        1
76 #define MAX_STALLING_STREAMS   1
77 #define MAX_PROCESSED_STREAMS  3
78 /* Batch mode is enabled only if FPS set is equal to or greater than this */
79 #define MIN_FPS_FOR_BATCH_MODE (120)
80 #define PREVIEW_FPS_FOR_HFR    (30)
81 #define DEFAULT_VIDEO_FPS      (30.0)
82 #define MAX_HFR_BATCH_SIZE     (8)
83 #define REGIONS_TUPLE_COUNT    5
84 #define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
85 
86 #define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
87 
88 #define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
89                                               CAM_QCOM_FEATURE_CROP |\
90                                               CAM_QCOM_FEATURE_ROTATION |\
91                                               CAM_QCOM_FEATURE_SHARPNESS |\
92                                               CAM_QCOM_FEATURE_SCALE |\
93                                               CAM_QCOM_FEATURE_CAC |\
94                                               CAM_QCOM_FEATURE_CDS )
95 
96 #define TIMEOUT_NEVER -1
97 
98 cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
99 const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
100 static pthread_mutex_t gCamLock = PTHREAD_MUTEX_INITIALIZER;
101 volatile uint32_t gCamHal3LogLevel = 1;
102 
103 const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
104     {"On",  CAM_CDS_MODE_ON},
105     {"Off", CAM_CDS_MODE_OFF},
106     {"Auto",CAM_CDS_MODE_AUTO}
107 };
108 
109 const QCamera3HardwareInterface::QCameraMap<
110         camera_metadata_enum_android_control_effect_mode_t,
111         cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
112     { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
113     { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
114     { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
115     { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
116     { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
117     { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
118     { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
119     { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
120     { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
121 };
122 
123 const QCamera3HardwareInterface::QCameraMap<
124         camera_metadata_enum_android_control_awb_mode_t,
125         cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
126     { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
127     { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
128     { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
129     { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
130     { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
131     { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
132     { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
133     { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
134     { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
135 };
136 
137 const QCamera3HardwareInterface::QCameraMap<
138         camera_metadata_enum_android_control_scene_mode_t,
139         cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
140     { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
141     { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
142     { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
143     { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
144     { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
145     { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
146     { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
147     { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
148     { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
149     { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
150     { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
151     { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
152     { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
153     { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
154     { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
155     { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
156 };
157 
158 const QCamera3HardwareInterface::QCameraMap<
159         camera_metadata_enum_android_control_af_mode_t,
160         cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
161     { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
162     { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
163     { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
164     { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
165     { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
166     { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
167     { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
168 };
169 
170 const QCamera3HardwareInterface::QCameraMap<
171         camera_metadata_enum_android_color_correction_aberration_mode_t,
172         cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
173     { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
174             CAM_COLOR_CORRECTION_ABERRATION_OFF },
175     { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
176             CAM_COLOR_CORRECTION_ABERRATION_FAST },
177     { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
178             CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
179 };
180 
181 const QCamera3HardwareInterface::QCameraMap<
182         camera_metadata_enum_android_control_ae_antibanding_mode_t,
183         cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
184     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
185     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
186     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
187     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
188 };
189 
190 const QCamera3HardwareInterface::QCameraMap<
191         camera_metadata_enum_android_control_ae_mode_t,
192         cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
193     { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
194     { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
195     { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
196     { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
197     { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
198 };
199 
200 const QCamera3HardwareInterface::QCameraMap<
201         camera_metadata_enum_android_flash_mode_t,
202         cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
203     { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
204     { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
205     { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
206 };
207 
208 const QCamera3HardwareInterface::QCameraMap<
209         camera_metadata_enum_android_statistics_face_detect_mode_t,
210         cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
211     { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
212     { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
213     { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
214 };
215 
216 const QCamera3HardwareInterface::QCameraMap<
217         camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
218         cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
219     { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
220       CAM_FOCUS_UNCALIBRATED },
221     { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
222       CAM_FOCUS_APPROXIMATE },
223     { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
224       CAM_FOCUS_CALIBRATED }
225 };
226 
227 const QCamera3HardwareInterface::QCameraMap<
228         camera_metadata_enum_android_lens_state_t,
229         cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
230     { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
231     { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
232 };
233 
// JPEG thumbnail resolutions advertised to the framework, stored as a
// flat sequence of (width, height) pairs; (0, 0) means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {
    0,   0,
    176, 144,
    320, 240,
    432, 288,
    480, 288,
    512, 288,
    512, 384
};
241 
242 const QCamera3HardwareInterface::QCameraMap<
243         camera_metadata_enum_android_sensor_test_pattern_mode_t,
244         cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
245     { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
246     { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
247     { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
248     { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
249     { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
250 };
251 
252 /* Since there is no mapping for all the options some Android enum are not listed.
253  * Also, the order in this list is important because while mapping from HAL to Android it will
254  * traverse from lower to higher index which means that for HAL values that are map to different
255  * Android values, the traverse logic will select the first one found.
256  */
257 const QCamera3HardwareInterface::QCameraMap<
258         camera_metadata_enum_android_sensor_reference_illuminant1_t,
259         cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
260     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
261     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
262     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
263     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
264     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
265     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
266     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
267     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
268     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
269     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
270     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
271     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
272     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
273     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
274     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
275     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
276 };
277 
278 const QCamera3HardwareInterface::QCameraMap<
279         int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
280     { 60, CAM_HFR_MODE_60FPS},
281     { 90, CAM_HFR_MODE_90FPS},
282     { 120, CAM_HFR_MODE_120FPS},
283     { 150, CAM_HFR_MODE_150FPS},
284     { 180, CAM_HFR_MODE_180FPS},
285     { 210, CAM_HFR_MODE_210FPS},
286     { 240, CAM_HFR_MODE_240FPS},
287     { 480, CAM_HFR_MODE_480FPS},
288 };
289 
290 camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
291     initialize:                         QCamera3HardwareInterface::initialize,
292     configure_streams:                  QCamera3HardwareInterface::configure_streams,
293     register_stream_buffers:            NULL,
294     construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
295     process_capture_request:            QCamera3HardwareInterface::process_capture_request,
296     get_metadata_vendor_tag_ops:        NULL,
297     dump:                               QCamera3HardwareInterface::dump,
298     flush:                              QCamera3HardwareInterface::flush,
299     reserved:                           {0},
300 };
301 
302 /*===========================================================================
303  * FUNCTION   : QCamera3HardwareInterface
304  *
305  * DESCRIPTION: constructor of QCamera3HardwareInterface
306  *
307  * PARAMETERS :
308  *   @cameraId  : camera ID
309  *
310  * RETURN     : none
311  *==========================================================================*/
QCamera3HardwareInterface(uint32_t cameraId,const camera_module_callbacks_t * callbacks)312 QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
313         const camera_module_callbacks_t *callbacks)
314     : mCameraId(cameraId),
315       mCameraHandle(NULL),
316       mCameraOpened(false),
317       mCameraInitialized(false),
318       mCallbackOps(NULL),
319       mMetadataChannel(NULL),
320       mPictureChannel(NULL),
321       mRawChannel(NULL),
322       mSupportChannel(NULL),
323       mAnalysisChannel(NULL),
324       mRawDumpChannel(NULL),
325       mDummyBatchChannel(NULL),
326       mChannelHandle(0),
327       mFirstRequest(false),
328       mFirstConfiguration(true),
329       mFlush(false),
330       mParamHeap(NULL),
331       mParameters(NULL),
332       mPrevParameters(NULL),
333       m_bIsVideo(false),
334       m_bIs4KVideo(false),
335       m_bEisSupportedSize(false),
336       m_bEisEnable(false),
337       m_MobicatMask(0),
338       mMinProcessedFrameDuration(0),
339       mMinJpegFrameDuration(0),
340       mMinRawFrameDuration(0),
341       mMetaFrameCount(0U),
342       mUpdateDebugLevel(false),
343       mCallbacks(callbacks),
344       mCaptureIntent(0),
345       mBatchSize(0),
346       mToBeQueuedVidBufs(0),
347       mHFRVideoFps(DEFAULT_VIDEO_FPS),
348       mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
349       mFirstFrameNumberInBatch(0),
350       mNeedSensorRestart(false),
351       mLdafCalibExist(false),
352       mPowerHintEnabled(false),
353       mLastCustIntentFrmNum(-1)
354 {
355     getLogLevel();
356     m_perfLock.lock_init();
357     mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
358     mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
359     mCameraDevice.common.close = close_camera_device;
360     mCameraDevice.ops = &mCameraOps;
361     mCameraDevice.priv = this;
362     gCamCapability[cameraId]->version = CAM_HAL_V3;
363     // TODO: hardcode for now until mctl add support for min_num_pp_bufs
364     //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
365     gCamCapability[cameraId]->min_num_pp_bufs = 3;
366     pthread_cond_init(&mRequestCond, NULL);
367     mPendingLiveRequest = 0;
368     mCurrentRequestId = -1;
369     pthread_mutex_init(&mMutex, NULL);
370 
371     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
372         mDefaultMetadata[i] = NULL;
373 
374     // Getting system props of different kinds
375     char prop[PROPERTY_VALUE_MAX];
376     memset(prop, 0, sizeof(prop));
377     property_get("persist.camera.raw.dump", prop, "0");
378     mEnableRawDump = atoi(prop);
379     if (mEnableRawDump)
380         CDBG("%s: Raw dump from Camera HAL enabled", __func__);
381 
382     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
383     memset(mLdafCalib, 0, sizeof(mLdafCalib));
384 
385     memset(prop, 0, sizeof(prop));
386     property_get("persist.camera.tnr.preview", prop, "1");
387     m_bTnrPreview = (uint8_t)atoi(prop);
388 
389     memset(prop, 0, sizeof(prop));
390     property_get("persist.camera.tnr.video", prop, "1");
391     m_bTnrVideo = (uint8_t)atoi(prop);
392 }
393 
394 /*===========================================================================
395  * FUNCTION   : ~QCamera3HardwareInterface
396  *
397  * DESCRIPTION: destructor of QCamera3HardwareInterface
398  *
399  * PARAMETERS : none
400  *
401  * RETURN     : none
402  *==========================================================================*/
~QCamera3HardwareInterface()403 QCamera3HardwareInterface::~QCamera3HardwareInterface()
404 {
405     CDBG("%s: E", __func__);
406 
407     /* Turn off current power hint before acquiring perfLock in case they
408      * conflict with each other */
409     disablePowerHint();
410 
411     m_perfLock.lock_acq();
412 
413     /* We need to stop all streams before deleting any stream */
414     if (mRawDumpChannel) {
415         mRawDumpChannel->stop();
416     }
417 
418     // NOTE: 'camera3_stream_t *' objects are already freed at
419     //        this stage by the framework
420     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
421         it != mStreamInfo.end(); it++) {
422         QCamera3ProcessingChannel *channel = (*it)->channel;
423         if (channel) {
424             channel->stop();
425         }
426     }
427     if (mSupportChannel)
428         mSupportChannel->stop();
429 
430     if (mAnalysisChannel) {
431         mAnalysisChannel->stop();
432     }
433     if (mMetadataChannel) {
434         mMetadataChannel->stop();
435     }
436     if (mChannelHandle) {
437         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
438                 mChannelHandle);
439         ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
440     }
441 
442     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
443         it != mStreamInfo.end(); it++) {
444         QCamera3ProcessingChannel *channel = (*it)->channel;
445         if (channel)
446             delete channel;
447         free (*it);
448     }
449     if (mSupportChannel) {
450         delete mSupportChannel;
451         mSupportChannel = NULL;
452     }
453 
454     if (mAnalysisChannel) {
455         delete mAnalysisChannel;
456         mAnalysisChannel = NULL;
457     }
458     if (mRawDumpChannel) {
459         delete mRawDumpChannel;
460         mRawDumpChannel = NULL;
461     }
462     if (mDummyBatchChannel) {
463         delete mDummyBatchChannel;
464         mDummyBatchChannel = NULL;
465     }
466     mPictureChannel = NULL;
467 
468     if (mMetadataChannel) {
469         delete mMetadataChannel;
470         mMetadataChannel = NULL;
471     }
472 
473     /* Clean up all channels */
474     if (mCameraInitialized) {
475         if(!mFirstConfiguration){
476             //send the last unconfigure
477             cam_stream_size_info_t stream_config_info;
478             memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
479             stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
480             stream_config_info.buffer_info.max_buffers =
481                     m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
482             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
483                     stream_config_info);
484             int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
485             if (rc < 0) {
486                 ALOGE("%s: set_parms failed for unconfigure", __func__);
487             }
488         }
489         deinitParameters();
490     }
491 
492     if (mChannelHandle) {
493         mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
494                 mChannelHandle);
495         ALOGE("%s: deleting channel %d", __func__, mChannelHandle);
496         mChannelHandle = 0;
497     }
498 
499     if (mCameraOpened)
500         closeCamera();
501 
502     mPendingBuffersMap.mPendingBufferList.clear();
503     mPendingReprocessResultList.clear();
504     for (pendingRequestIterator i = mPendingRequestsList.begin();
505             i != mPendingRequestsList.end();) {
506         i = erasePendingRequest(i);
507     }
508     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
509         if (mDefaultMetadata[i])
510             free_camera_metadata(mDefaultMetadata[i]);
511 
512     m_perfLock.lock_rel();
513     m_perfLock.lock_deinit();
514 
515     pthread_cond_destroy(&mRequestCond);
516 
517     pthread_mutex_destroy(&mMutex);
518     CDBG("%s: X", __func__);
519 }
520 
521 /*===========================================================================
522  * FUNCTION   : erasePendingRequest
523  *
524  * DESCRIPTION: function to erase a desired pending request after freeing any
525  *              allocated memory
526  *
527  * PARAMETERS :
528  *   @i       : iterator pointing to pending request to be erased
529  *
530  * RETURN     : iterator pointing to the next request
531  *==========================================================================*/
532 QCamera3HardwareInterface::pendingRequestIterator
erasePendingRequest(pendingRequestIterator i)533         QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
534 {
535     if (i->input_buffer != NULL) {
536         free(i->input_buffer);
537         i->input_buffer = NULL;
538     }
539     if (i->settings != NULL)
540         free_camera_metadata((camera_metadata_t*)i->settings);
541     return mPendingRequestsList.erase(i);
542 }
543 
544 /*===========================================================================
545  * FUNCTION   : camEvtHandle
546  *
547  * DESCRIPTION: Function registered to mm-camera-interface to handle events
548  *
549  * PARAMETERS :
550  *   @camera_handle : interface layer camera handle
551  *   @evt           : ptr to event
552  *   @user_data     : user data ptr
553  *
554  * RETURN     : none
555  *==========================================================================*/
camEvtHandle(uint32_t,mm_camera_event_t * evt,void * user_data)556 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
557                                           mm_camera_event_t *evt,
558                                           void *user_data)
559 {
560     QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
561     if (obj && evt) {
562         switch(evt->server_event_type) {
563             case CAM_EVENT_TYPE_DAEMON_DIED:
564                 ALOGE("%s: Fatal, camera daemon died", __func__);
565                 //close the camera backend
566                 if (obj->mCameraHandle && obj->mCameraHandle->camera_handle
567                         && obj->mCameraHandle->ops) {
568                     obj->mCameraHandle->ops->error_close_camera(obj->mCameraHandle->camera_handle);
569                 } else {
570                     ALOGE("%s: Could not close camera on error because the handle or ops is NULL",
571                             __func__);
572                 }
573                 camera3_notify_msg_t notify_msg;
574                 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
575                 notify_msg.type = CAMERA3_MSG_ERROR;
576                 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
577                 notify_msg.message.error.error_stream = NULL;
578                 notify_msg.message.error.frame_number = 0;
579                 obj->mCallbackOps->notify(obj->mCallbackOps, &notify_msg);
580                 break;
581 
582             case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
583                 CDBG("%s: HAL got request pull from Daemon", __func__);
584                 pthread_mutex_lock(&obj->mMutex);
585                 obj->mWokenUpByDaemon = true;
586                 obj->unblockRequestIfNecessary();
587                 pthread_mutex_unlock(&obj->mMutex);
588                 break;
589 
590             default:
591                 CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
592                         evt->server_event_type);
593                 break;
594         }
595     } else {
596         ALOGE("%s: NULL user_data/evt", __func__);
597     }
598 }
599 
600 /*===========================================================================
601  * FUNCTION   : openCamera
602  *
603  * DESCRIPTION: open camera
604  *
605  * PARAMETERS :
606  *   @hw_device  : double ptr for camera device struct
607  *
608  * RETURN     : int32_t type of status
609  *              NO_ERROR  -- success
610  *              none-zero failure code
611  *==========================================================================*/
openCamera(struct hw_device_t ** hw_device)612 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
613 {
614     int rc = 0;
615     if (mCameraOpened) {
616         *hw_device = NULL;
617         return PERMISSION_DENIED;
618     }
619     m_perfLock.lock_acq();
620     rc = openCamera();
621     if (rc == 0) {
622         *hw_device = &mCameraDevice.common;
623     } else
624         *hw_device = NULL;
625 
626     m_perfLock.lock_rel();
627     return rc;
628 }
629 
630 /*===========================================================================
631  * FUNCTION   : openCamera
632  *
633  * DESCRIPTION: open camera
634  *
635  * PARAMETERS : none
636  *
637  * RETURN     : int32_t type of status
638  *              NO_ERROR  -- success
639  *              none-zero failure code
640  *==========================================================================*/
openCamera()641 int QCamera3HardwareInterface::openCamera()
642 {
643     int rc = 0;
644 
645     ATRACE_CALL();
646     if (mCameraHandle) {
647         ALOGE("Failure: Camera already opened");
648         return ALREADY_EXISTS;
649     }
650 
651     rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
652     if (rc < 0) {
653         ALOGE("%s: Failed to reserve flash for camera id: %d",
654                 __func__,
655                 mCameraId);
656         return UNKNOWN_ERROR;
657     }
658 
659     rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
660     if (rc) {
661         ALOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
662         return rc;
663     }
664 
665     mCameraOpened = true;
666 
667     rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
668             camEvtHandle, (void *)this);
669 
670     if (rc < 0) {
671         ALOGE("%s: Error, failed to register event callback", __func__);
672         /* Not closing camera here since it is already handled in destructor */
673         return FAILED_TRANSACTION;
674     }
675     mFirstConfiguration = true;
676     return NO_ERROR;
677 }
678 
679 /*===========================================================================
680  * FUNCTION   : closeCamera
681  *
682  * DESCRIPTION: close camera
683  *
684  * PARAMETERS : none
685  *
686  * RETURN     : int32_t type of status
687  *              NO_ERROR  -- success
688  *              none-zero failure code
689  *==========================================================================*/
closeCamera()690 int QCamera3HardwareInterface::closeCamera()
691 {
692     ATRACE_CALL();
693     int rc = NO_ERROR;
694 
695     rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
696     mCameraHandle = NULL;
697     mCameraOpened = false;
698 
699     if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
700         CDBG("%s: Failed to release flash for camera id: %d",
701                 __func__,
702                 mCameraId);
703     }
704 
705     return rc;
706 }
707 
708 /*===========================================================================
709  * FUNCTION   : initialize
710  *
711  * DESCRIPTION: Initialize frameworks callback functions
712  *
713  * PARAMETERS :
714  *   @callback_ops : callback function to frameworks
715  *
716  * RETURN     :
717  *
718  *==========================================================================*/
initialize(const struct camera3_callback_ops * callback_ops)719 int QCamera3HardwareInterface::initialize(
720         const struct camera3_callback_ops *callback_ops)
721 {
722     ATRACE_CALL();
723     int rc;
724 
725     pthread_mutex_lock(&mMutex);
726 
727     rc = initParameters();
728     if (rc < 0) {
729         ALOGE("%s: initParamters failed %d", __func__, rc);
730        goto err1;
731     }
732     mCallbackOps = callback_ops;
733 
734     mChannelHandle = mCameraHandle->ops->add_channel(
735             mCameraHandle->camera_handle, NULL, NULL, this);
736     if (mChannelHandle == 0) {
737         ALOGE("%s: add_channel failed", __func__);
738         rc = -ENOMEM;
739         pthread_mutex_unlock(&mMutex);
740         return rc;
741     }
742 
743     pthread_mutex_unlock(&mMutex);
744     mCameraInitialized = true;
745     return 0;
746 
747 err1:
748     pthread_mutex_unlock(&mMutex);
749     return rc;
750 }
751 
752 /*===========================================================================
753  * FUNCTION   : validateStreamDimensions
754  *
755  * DESCRIPTION: Check if the configuration requested are those advertised
756  *
757  * PARAMETERS :
758  *   @stream_list : streams to be configured
759  *
760  * RETURN     :
761  *
762  *==========================================================================*/
validateStreamDimensions(camera3_stream_configuration_t * streamList)763 int QCamera3HardwareInterface::validateStreamDimensions(
764         camera3_stream_configuration_t *streamList)
765 {
766     int rc = NO_ERROR;
767     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
768     int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
769     size_t count = 0;
770 
771     camera3_stream_t *inputStream = NULL;
772     /*
773     * Loop through all streams to find input stream if it exists*
774     */
775     for (size_t i = 0; i< streamList->num_streams; i++) {
776         if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
777             if (inputStream != NULL) {
778                 ALOGE("%s: Error, Multiple input streams requested");
779                 return -EINVAL;
780             }
781             inputStream = streamList->streams[i];
782         }
783     }
784     /*
785     * Loop through all streams requested in configuration
786     * Check if unsupported sizes have been requested on any of them
787     */
788     for (size_t j = 0; j < streamList->num_streams; j++) {
789         bool sizeFound = false;
790         size_t jpeg_sizes_cnt = 0;
791         camera3_stream_t *newStream = streamList->streams[j];
792 
793         uint32_t rotatedHeight = newStream->height;
794         uint32_t rotatedWidth = newStream->width;
795         if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
796                 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
797             rotatedHeight = newStream->width;
798             rotatedWidth = newStream->height;
799         }
800 
801         /*
802         * Sizes are different for each type of stream format check against
803         * appropriate table.
804         */
805         switch (newStream->format) {
806         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
807         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
808         case HAL_PIXEL_FORMAT_RAW10:
809             count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
810             for (size_t i = 0; i < count; i++) {
811                 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
812                         (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
813                     sizeFound = true;
814                     break;
815                 }
816             }
817             break;
818         case HAL_PIXEL_FORMAT_BLOB:
819             count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
820             /* Generate JPEG sizes table */
821             makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
822                     count,
823                     MAX_SIZES_CNT,
824                     available_processed_sizes);
825             jpeg_sizes_cnt = filterJpegSizes(
826                     available_jpeg_sizes,
827                     available_processed_sizes,
828                     count * 2,
829                     MAX_SIZES_CNT * 2,
830                     gCamCapability[mCameraId]->active_array_size,
831                     gCamCapability[mCameraId]->max_downscale_factor);
832 
833             /* Verify set size against generated sizes table */
834             for (size_t i = 0; i < (jpeg_sizes_cnt / 2); i++) {
835                 if (((int32_t)rotatedWidth == available_jpeg_sizes[i*2]) &&
836                         ((int32_t)rotatedHeight == available_jpeg_sizes[i*2+1])) {
837                     sizeFound = true;
838                     break;
839                 }
840             }
841             break;
842         case HAL_PIXEL_FORMAT_YCbCr_420_888:
843         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
844         default:
845             if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
846                     || newStream->stream_type == CAMERA3_STREAM_INPUT
847                     || IS_USAGE_ZSL(newStream->usage)) {
848                 if (((int32_t)rotatedWidth ==
849                                 gCamCapability[mCameraId]->active_array_size.width) &&
850                                 ((int32_t)rotatedHeight ==
851                                 gCamCapability[mCameraId]->active_array_size.height)) {
852                     sizeFound = true;
853                     break;
854                 }
855                 /* We could potentially break here to enforce ZSL stream
856                  * set from frameworks always is full active array size
857                  * but it is not clear from the spc if framework will always
858                  * follow that, also we have logic to override to full array
859                  * size, so keeping the logic lenient at the moment
860                  */
861             }
862             count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
863                     MAX_SIZES_CNT);
864             for (size_t i = 0; i < count; i++) {
865                 if (((int32_t)rotatedWidth ==
866                             gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
867                             ((int32_t)rotatedHeight ==
868                             gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
869                     sizeFound = true;
870                     break;
871                 }
872             }
873             break;
874         } /* End of switch(newStream->format) */
875 
876         /* We error out even if a single stream has unsupported size set */
877         if (!sizeFound) {
878             ALOGE("%s: Error: Unsupported size of  %d x %d requested for stream"
879                   "type:%d", __func__, rotatedWidth, rotatedHeight,
880                   newStream->format);
881             ALOGE("%s: Active array size is  %d x %d", __func__,
882                     gCamCapability[mCameraId]->active_array_size.width,
883                     gCamCapability[mCameraId]->active_array_size.height);
884             rc = -EINVAL;
885             break;
886         }
887     } /* End of for each stream */
888     return rc;
889 }
890 
891 /*==============================================================================
892  * FUNCTION   : isSupportChannelNeeded
893  *
894  * DESCRIPTION: Simple heuristic func to determine if support channels is needed
895  *
896  * PARAMETERS :
897  *   @stream_list : streams to be configured
898  *   @stream_config_info : the config info for streams to be configured
899  *
900  * RETURN     : Boolen true/false decision
901  *
902  *==========================================================================*/
isSupportChannelNeeded(camera3_stream_configuration_t * streamList,cam_stream_size_info_t stream_config_info)903 bool QCamera3HardwareInterface::isSupportChannelNeeded(
904         camera3_stream_configuration_t *streamList,
905         cam_stream_size_info_t stream_config_info)
906 {
907     uint32_t i;
908     bool pprocRequested = false;
909     /* Check for conditions where PProc pipeline does not have any streams*/
910     for (i = 0; i < stream_config_info.num_streams; i++) {
911         if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
912                 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
913             pprocRequested = true;
914             break;
915         }
916     }
917 
918     if (pprocRequested == false )
919         return true;
920 
921     /* Dummy stream needed if only raw or jpeg streams present */
922     for (i = 0; i < streamList->num_streams; i++) {
923         switch(streamList->streams[i]->format) {
924             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
925             case HAL_PIXEL_FORMAT_RAW10:
926             case HAL_PIXEL_FORMAT_RAW16:
927             case HAL_PIXEL_FORMAT_BLOB:
928                 break;
929             default:
930                 return false;
931         }
932     }
933     return true;
934 }
935 
936 /*==============================================================================
937  * FUNCTION   : getSensorOutputSize
938  *
939  * DESCRIPTION: Get sensor output size based on current stream configuratoin
940  *
941  * PARAMETERS :
942  *   @sensor_dim : sensor output dimension (output)
943  *
944  * RETURN     : int32_t type of status
945  *              NO_ERROR  -- success
946  *              none-zero failure code
947  *
948  *==========================================================================*/
getSensorOutputSize(cam_dimension_t & sensor_dim)949 int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
950 {
951     int32_t rc = NO_ERROR;
952 
953     cam_dimension_t max_dim = {0, 0};
954     for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
955         if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
956             max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
957         if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
958             max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
959     }
960 
961     clear_metadata_buffer(mParameters);
962 
963     rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
964             max_dim);
965     if (rc != NO_ERROR) {
966         ALOGE("%s:Failed to update table for CAM_INTF_PARM_MAX_DIMENSION", __func__);
967         return rc;
968     }
969 
970     rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
971     if (rc != NO_ERROR) {
972         ALOGE("%s: Failed to set CAM_INTF_PARM_MAX_DIMENSION", __func__);
973         return rc;
974     }
975 
976     clear_metadata_buffer(mParameters);
977     ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
978 
979     rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
980             mParameters);
981     if (rc != NO_ERROR) {
982         ALOGE("%s: Failed to get CAM_INTF_PARM_RAW_DIMENSION", __func__);
983         return rc;
984     }
985 
986     READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
987     ALOGI("%s: sensor output dimension = %d x %d", __func__, sensor_dim.width, sensor_dim.height);
988 
989     return rc;
990 }
991 
992 /*==============================================================================
993  * FUNCTION   : enablePowerHint
994  *
995  * DESCRIPTION: enable single powerhint for preview and different video modes.
996  *
997  * PARAMETERS :
998  *
999  * RETURN     : NULL
1000  *
1001  *==========================================================================*/
enablePowerHint()1002 void QCamera3HardwareInterface::enablePowerHint()
1003 {
1004     if (!mPowerHintEnabled) {
1005         m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 1);
1006         mPowerHintEnabled = true;
1007     }
1008 }
1009 
1010 /*==============================================================================
1011  * FUNCTION   : disablePowerHint
1012  *
1013  * DESCRIPTION: disable current powerhint.
1014  *
1015  * PARAMETERS :
1016  *
1017  * RETURN     : NULL
1018  *
1019  *==========================================================================*/
disablePowerHint()1020 void QCamera3HardwareInterface::disablePowerHint()
1021 {
1022     if (mPowerHintEnabled) {
1023         m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 0);
1024         mPowerHintEnabled = false;
1025     }
1026 }
1027 
1028 /*===========================================================================
1029  * FUNCTION   : configureStreams
1030  *
1031  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1032  *              and output streams.
1033  *
1034  * PARAMETERS :
1035  *   @stream_list : streams to be configured
1036  *
1037  * RETURN     :
1038  *
1039  *==========================================================================*/
configureStreams(camera3_stream_configuration_t * streamList)1040 int QCamera3HardwareInterface::configureStreams(
1041         camera3_stream_configuration_t *streamList)
1042 {
1043     ATRACE_CALL();
1044     int rc = 0;
1045 
1046     // Acquire perfLock before configure streams
1047     m_perfLock.lock_acq();
1048     rc = configureStreamsPerfLocked(streamList);
1049     m_perfLock.lock_rel();
1050 
1051     return rc;
1052 }
1053 
1054 /*===========================================================================
1055  * FUNCTION   : configureStreamsPerfLocked
1056  *
1057  * DESCRIPTION: configureStreams while perfLock is held.
1058  *
1059  * PARAMETERS :
1060  *   @stream_list : streams to be configured
1061  *
1062  * RETURN     : int32_t type of status
1063  *              NO_ERROR  -- success
1064  *              none-zero failure code
1065  *==========================================================================*/
configureStreamsPerfLocked(camera3_stream_configuration_t * streamList)1066 int QCamera3HardwareInterface::configureStreamsPerfLocked(
1067         camera3_stream_configuration_t *streamList)
1068 {
1069     ATRACE_CALL();
1070     int rc = 0;
1071 
1072     // Sanity check stream_list
1073     if (streamList == NULL) {
1074         ALOGE("%s: NULL stream configuration", __func__);
1075         return BAD_VALUE;
1076     }
1077     if (streamList->streams == NULL) {
1078         ALOGE("%s: NULL stream list", __func__);
1079         return BAD_VALUE;
1080     }
1081 
1082     if (streamList->num_streams < 1) {
1083         ALOGE("%s: Bad number of streams requested: %d", __func__,
1084                 streamList->num_streams);
1085         return BAD_VALUE;
1086     }
1087 
1088     if (streamList->num_streams >= MAX_NUM_STREAMS) {
1089         ALOGE("%s: Maximum number of streams %d exceeded: %d", __func__,
1090                 MAX_NUM_STREAMS, streamList->num_streams);
1091         return BAD_VALUE;
1092     }
1093 
1094     mOpMode = streamList->operation_mode;
1095     CDBG("%s: mOpMode: %d", __func__, mOpMode);
1096 
1097     /* first invalidate all the steams in the mStreamList
1098      * if they appear again, they will be validated */
1099     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1100             it != mStreamInfo.end(); it++) {
1101         QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1102         if (channel) {
1103           channel->stop();
1104         }
1105         (*it)->status = INVALID;
1106     }
1107 
1108     if (mRawDumpChannel) {
1109         mRawDumpChannel->stop();
1110         delete mRawDumpChannel;
1111         mRawDumpChannel = NULL;
1112     }
1113 
1114     if (mSupportChannel)
1115         mSupportChannel->stop();
1116 
1117     if (mAnalysisChannel) {
1118         mAnalysisChannel->stop();
1119     }
1120     if (mMetadataChannel) {
1121         /* If content of mStreamInfo is not 0, there is metadata stream */
1122         mMetadataChannel->stop();
1123     }
1124     if (mChannelHandle) {
1125         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1126                 mChannelHandle);
1127         ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
1128     }
1129 
1130     pthread_mutex_lock(&mMutex);
1131 
1132     /* Check whether we have video stream */
1133     m_bIs4KVideo = false;
1134     m_bIsVideo = false;
1135     m_bEisSupportedSize = false;
1136     m_bTnrEnabled = false;
1137     bool isZsl = false;
1138     uint32_t videoWidth = 0U;
1139     uint32_t videoHeight = 0U;
1140     size_t rawStreamCnt = 0;
1141     size_t stallStreamCnt = 0;
1142     size_t processedStreamCnt = 0;
1143     // Number of streams on ISP encoder path
1144     size_t numStreamsOnEncoder = 0;
1145     size_t numYuv888OnEncoder = 0;
1146     bool bYuv888OverrideJpeg = false;
1147     cam_dimension_t largeYuv888Size = {0, 0};
1148     cam_dimension_t maxViewfinderSize = {0, 0};
1149     bool bJpegExceeds4K = false;
1150     bool bJpegOnEncoder = false;
1151     bool bUseCommonFeatureMask = false;
1152     uint32_t commonFeatureMask = 0;
1153     maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1154     camera3_stream_t *inputStream = NULL;
1155     bool isJpeg = false;
1156     cam_dimension_t jpegSize = {0, 0};
1157 
1158     /*EIS configuration*/
1159     bool eisSupported = false;
1160     bool oisSupported = false;
1161     int32_t margin_index = -1;
1162     uint8_t eis_prop_set;
1163     uint32_t maxEisWidth = 0;
1164     uint32_t maxEisHeight = 0;
1165     int32_t hal_version = CAM_HAL_V3;
1166 
1167     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1168 
1169     size_t count = IS_TYPE_MAX;
1170     count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1171     for (size_t i = 0; i < count; i++) {
1172         if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1173             eisSupported = true;
1174             margin_index = (int32_t)i;
1175             break;
1176         }
1177     }
1178 
1179     count = CAM_OPT_STAB_MAX;
1180     count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1181     for (size_t i = 0; i < count; i++) {
1182         if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1183             oisSupported = true;
1184             break;
1185         }
1186     }
1187 
1188     if (eisSupported) {
1189         maxEisWidth = MAX_EIS_WIDTH;
1190         maxEisHeight = MAX_EIS_HEIGHT;
1191     }
1192 
1193     /* EIS setprop control */
1194     char eis_prop[PROPERTY_VALUE_MAX];
1195     memset(eis_prop, 0, sizeof(eis_prop));
1196     property_get("persist.camera.eis.enable", eis_prop, "0");
1197     eis_prop_set = (uint8_t)atoi(eis_prop);
1198 
1199     m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1200             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1201 
1202     /* stream configurations */
1203     for (size_t i = 0; i < streamList->num_streams; i++) {
1204         camera3_stream_t *newStream = streamList->streams[i];
1205         ALOGI("%s: stream[%d] type = %d, format = %d, width = %d, "
1206                 "height = %d, rotation = %d, usage = 0x%x",
1207                 __func__, i, newStream->stream_type, newStream->format,
1208                 newStream->width, newStream->height, newStream->rotation,
1209                 newStream->usage);
1210         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1211                 newStream->stream_type == CAMERA3_STREAM_INPUT){
1212             isZsl = true;
1213         }
1214         if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1215             inputStream = newStream;
1216         }
1217 
1218         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1219             isJpeg = true;
1220             jpegSize.width = newStream->width;
1221             jpegSize.height = newStream->height;
1222             if (newStream->width > VIDEO_4K_WIDTH ||
1223                     newStream->height > VIDEO_4K_HEIGHT)
1224                 bJpegExceeds4K = true;
1225         }
1226 
1227         if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1228                 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1229             m_bIsVideo = true;
1230             videoWidth = newStream->width;
1231             videoHeight = newStream->height;
1232             if ((VIDEO_4K_WIDTH <= newStream->width) &&
1233                     (VIDEO_4K_HEIGHT <= newStream->height)) {
1234                 m_bIs4KVideo = true;
1235             }
1236             m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1237                                   (newStream->height <= maxEisHeight);
1238         }
1239         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1240                 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1241             switch (newStream->format) {
1242             case HAL_PIXEL_FORMAT_BLOB:
1243                 stallStreamCnt++;
1244                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1245                         newStream->height)) {
1246                     commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1247                     numStreamsOnEncoder++;
1248                     bJpegOnEncoder = true;
1249                 }
1250                 break;
1251             case HAL_PIXEL_FORMAT_RAW10:
1252             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1253             case HAL_PIXEL_FORMAT_RAW16:
1254                 rawStreamCnt++;
1255                 break;
1256             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1257                 processedStreamCnt++;
1258                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1259                         newStream->height)) {
1260                     if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1261                             IS_USAGE_ZSL(newStream->usage)) {
1262                         commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1263                     } else {
1264                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1265                     }
1266                     numStreamsOnEncoder++;
1267                 }
1268                 break;
1269             case HAL_PIXEL_FORMAT_YCbCr_420_888:
1270                 processedStreamCnt++;
1271                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1272                         newStream->height)) {
1273                     // If Yuv888 size is not greater than 4K, set feature mask
1274                     // to SUPERSET so that it support concurrent request on
1275                     // YUV and JPEG.
1276                     if (newStream->width <= VIDEO_4K_WIDTH &&
1277                             newStream->height <= VIDEO_4K_HEIGHT) {
1278                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1279                     } else {
1280                         commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1281                     }
1282                     numStreamsOnEncoder++;
1283                     numYuv888OnEncoder++;
1284                     largeYuv888Size.width = newStream->width;
1285                     largeYuv888Size.height = newStream->height;
1286                 }
1287                 break;
1288             default:
1289                 processedStreamCnt++;
1290                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1291                         newStream->height)) {
1292                     commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1293                     numStreamsOnEncoder++;
1294                 }
1295                 break;
1296             }
1297 
1298         }
1299     }
1300 
1301     if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1302         !m_bIsVideo) {
1303         m_bEisEnable = false;
1304     }
1305 
1306     /* Logic to enable/disable TNR based on specific config size/etc.*/
1307     if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1308             ((videoWidth == 1920 && videoHeight == 1080) ||
1309             (videoWidth == 1280 && videoHeight == 720)) &&
1310             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1311         m_bTnrEnabled = true;
1312 
1313     /* Check if num_streams is sane */
1314     if (stallStreamCnt > MAX_STALLING_STREAMS ||
1315             rawStreamCnt > MAX_RAW_STREAMS ||
1316             processedStreamCnt > MAX_PROCESSED_STREAMS) {
1317         ALOGE("%s: Invalid stream configu: stall: %d, raw: %d, processed %d",
1318                 __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
1319         pthread_mutex_unlock(&mMutex);
1320         return -EINVAL;
1321     }
1322     /* Check whether we have zsl stream or 4k video case */
1323     if (isZsl && m_bIsVideo) {
1324         ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
1325         pthread_mutex_unlock(&mMutex);
1326         return -EINVAL;
1327     }
1328     /* Check if stream sizes are sane */
1329     if (numStreamsOnEncoder > 2) {
1330         ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
1331                 __func__);
1332         pthread_mutex_unlock(&mMutex);
1333         return -EINVAL;
1334     } else if (1 < numStreamsOnEncoder){
1335         bUseCommonFeatureMask = true;
1336         CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed",
1337                 __func__);
1338     }
1339 
1340     /* Check if BLOB size is greater than 4k in 4k recording case */
1341     if (m_bIs4KVideo && bJpegExceeds4K) {
1342         ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
1343                 __func__);
1344         pthread_mutex_unlock(&mMutex);
1345         return -EINVAL;
1346     }
1347 
1348     // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1349     // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1350     // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1351     // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1352     // configurations:
1353     //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1354     //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1355     //    (These two configurations will not have CAC2 enabled even in HQ modes.)
1356     if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1357         ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1358                 __func__);
1359         pthread_mutex_unlock(&mMutex);
1360         return -EINVAL;
1361     }
1362 
1363     // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1364     // the YUV stream's size is greater or equal to the JPEG size, set common
1365     // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1366     if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1367             jpegSize.width, jpegSize.height) &&
1368             largeYuv888Size.width > jpegSize.width &&
1369             largeYuv888Size.height > jpegSize.height) {
1370         bYuv888OverrideJpeg = true;
1371     } else if (!isJpeg && numStreamsOnEncoder > 1) {
1372         commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1373     }
1374 
1375     rc = validateStreamDimensions(streamList);
1376     if (rc == NO_ERROR) {
1377         rc = validateStreamRotations(streamList);
1378     }
1379     if (rc != NO_ERROR) {
1380         ALOGE("%s: Invalid stream configuration requested!", __func__);
1381         pthread_mutex_unlock(&mMutex);
1382         return rc;
1383     }
1384 
1385     camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1386     camera3_stream_t *jpegStream = NULL;
1387     for (size_t i = 0; i < streamList->num_streams; i++) {
1388         camera3_stream_t *newStream = streamList->streams[i];
1389         CDBG_HIGH("%s: newStream type = %d, stream format = %d "
1390                 "stream size : %d x %d, stream rotation = %d",
1391                 __func__, newStream->stream_type, newStream->format,
1392                 newStream->width, newStream->height, newStream->rotation);
1393         //if the stream is in the mStreamList validate it
1394         bool stream_exists = false;
1395         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1396                 it != mStreamInfo.end(); it++) {
1397             if ((*it)->stream == newStream) {
1398                 QCamera3ProcessingChannel *channel =
1399                     (QCamera3ProcessingChannel*)(*it)->stream->priv;
1400                 stream_exists = true;
1401                 if (channel)
1402                     delete channel;
1403                 (*it)->status = VALID;
1404                 (*it)->stream->priv = NULL;
1405                 (*it)->channel = NULL;
1406             }
1407         }
1408         if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1409             //new stream
1410             stream_info_t* stream_info;
1411             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1412             if (!stream_info) {
1413                ALOGE("%s: Could not allocate stream info", __func__);
1414                rc = -ENOMEM;
1415                pthread_mutex_unlock(&mMutex);
1416                return rc;
1417             }
1418             stream_info->stream = newStream;
1419             stream_info->status = VALID;
1420             stream_info->channel = NULL;
1421             mStreamInfo.push_back(stream_info);
1422         }
1423         /* Covers Opaque ZSL and API1 F/W ZSL */
1424         if (IS_USAGE_ZSL(newStream->usage)
1425                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1426             if (zslStream != NULL) {
1427                 ALOGE("%s: Multiple input/reprocess streams requested!", __func__);
1428                 pthread_mutex_unlock(&mMutex);
1429                 return BAD_VALUE;
1430             }
1431             zslStream = newStream;
1432         }
1433         /* Covers YUV reprocess */
1434         if (inputStream != NULL) {
1435             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1436                     && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1437                     && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1438                     && inputStream->width == newStream->width
1439                     && inputStream->height == newStream->height) {
1440                 if (zslStream != NULL) {
1441                     /* This scenario indicates multiple YUV streams with same size
1442                      * as input stream have been requested, since zsl stream handle
1443                      * is solely use for the purpose of overriding the size of streams
1444                      * which share h/w streams we will just make a guess here as to
1445                      * which of the stream is a ZSL stream, this will be refactored
1446                      * once we make generic logic for streams sharing encoder output
1447                      */
1448                     CDBG_HIGH("%s: Warning, Multiple ip/reprocess streams requested!", __func__);
1449                 }
1450                 zslStream = newStream;
1451             }
1452         }
1453         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1454             jpegStream = newStream;
1455         }
1456     }
1457 
1458     /* If a zsl stream is set, we know that we have configured at least one input or
1459        bidirectional stream */
1460     if (NULL != zslStream) {
1461         mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1462         mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1463         mInputStreamInfo.format = zslStream->format;
1464         mInputStreamInfo.usage = zslStream->usage;
1465         CDBG("%s: Input stream configured! %d x %d, format %d, usage %d",
1466                 __func__, mInputStreamInfo.dim.width,
1467                 mInputStreamInfo.dim.height,
1468                 mInputStreamInfo.format, mInputStreamInfo.usage);
1469     }
1470 
1471     cleanAndSortStreamInfo();
1472     if (mMetadataChannel) {
1473         delete mMetadataChannel;
1474         mMetadataChannel = NULL;
1475     }
1476     if (mSupportChannel) {
1477         delete mSupportChannel;
1478         mSupportChannel = NULL;
1479     }
1480 
1481     if (mAnalysisChannel) {
1482         delete mAnalysisChannel;
1483         mAnalysisChannel = NULL;
1484     }
1485 
1486     if (mDummyBatchChannel) {
1487         delete mDummyBatchChannel;
1488         mDummyBatchChannel = NULL;
1489     }
1490 
1491     //Create metadata channel and initialize it
1492     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1493                     mChannelHandle, mCameraHandle->ops, captureResultCb,
1494                     &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
1495     if (mMetadataChannel == NULL) {
1496         ALOGE("%s: failed to allocate metadata channel", __func__);
1497         rc = -ENOMEM;
1498         pthread_mutex_unlock(&mMutex);
1499         return rc;
1500     }
1501     rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1502     if (rc < 0) {
1503         ALOGE("%s: metadata channel initialization failed", __func__);
1504         delete mMetadataChannel;
1505         mMetadataChannel = NULL;
1506         pthread_mutex_unlock(&mMutex);
1507         return rc;
1508     }
1509 
1510     // Create analysis stream all the time, even when h/w support is not available
1511     {
1512         mAnalysisChannel = new QCamera3SupportChannel(
1513                 mCameraHandle->camera_handle,
1514                 mChannelHandle,
1515                 mCameraHandle->ops,
1516                 &gCamCapability[mCameraId]->padding_info,
1517                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1518                 CAM_STREAM_TYPE_ANALYSIS,
1519                 &gCamCapability[mCameraId]->analysis_recommended_res,
1520                 gCamCapability[mCameraId]->analysis_recommended_format,
1521                 this,
1522                 0); // force buffer count to 0
1523         if (!mAnalysisChannel) {
1524             ALOGE("%s: H/W Analysis channel cannot be created", __func__);
1525             pthread_mutex_unlock(&mMutex);
1526             return -ENOMEM;
1527         }
1528     }
1529 
1530     bool isRawStreamRequested = false;
1531     memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1532     /* Allocate channel objects for the requested streams */
1533     for (size_t i = 0; i < streamList->num_streams; i++) {
1534         camera3_stream_t *newStream = streamList->streams[i];
1535         uint32_t stream_usage = newStream->usage;
1536         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1537         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1538         if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1539                 || IS_USAGE_ZSL(newStream->usage)) &&
1540             newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1541             mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1542             if (bUseCommonFeatureMask) {
1543                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1544                         commonFeatureMask;
1545             } else {
1546                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1547                         CAM_QCOM_FEATURE_NONE;
1548             }
1549 
1550         } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1551                 CDBG_HIGH("%s: Input stream configured, reprocess config", __func__);
1552         } else {
1553             //for non zsl streams find out the format
1554             switch (newStream->format) {
1555             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1556               {
1557                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1558                          = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1559 
1560                  if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1561 
1562                      mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_VIDEO;
1563                      if (m_bTnrEnabled && m_bTnrVideo) {
1564                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1565                              CAM_QCOM_FEATURE_CPP_TNR;
1566                      }
1567 
1568                  } else {
1569 
1570                      mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_PREVIEW;
1571                      if (m_bTnrEnabled && m_bTnrPreview) {
1572                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1573                              CAM_QCOM_FEATURE_CPP_TNR;
1574                      }
1575                  }
1576 
1577                  if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1578                          (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1579                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1580                              newStream->height;
1581                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1582                              newStream->width;
1583                  }
1584               }
1585               break;
1586            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1587               mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1588               if (isOnEncoder(maxViewfinderSize, newStream->width,
1589                       newStream->height)) {
1590                   if (bUseCommonFeatureMask)
1591                       mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1592                               commonFeatureMask;
1593                   else
1594                       mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1595                               CAM_QCOM_FEATURE_NONE;
1596               } else {
1597                   mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1598                           CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1599               }
1600               break;
1601            case HAL_PIXEL_FORMAT_BLOB:
1602               mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1603               if (m_bIs4KVideo && !isZsl) {
1604                   mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1605                           = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1606               } else {
1607                   if (bUseCommonFeatureMask &&
1608                           isOnEncoder(maxViewfinderSize, newStream->width,
1609                                   newStream->height)) {
1610                       mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1611                   } else {
1612                       mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1613                   }
1614               }
1615               if (isZsl) {
1616                   if (zslStream) {
1617                       mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1618                               (int32_t)zslStream->width;
1619                       mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1620                               (int32_t)zslStream->height;
1621                   } else {
1622                       ALOGE("%s: Error, No ZSL stream identified",__func__);
1623                       pthread_mutex_unlock(&mMutex);
1624                       return -EINVAL;
1625                   }
1626               } else if (m_bIs4KVideo) {
1627                   mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1628                           (int32_t)videoWidth;
1629                   mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1630                           (int32_t)videoHeight;
1631               } else if (bYuv888OverrideJpeg) {
1632                   mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1633                           (int32_t)largeYuv888Size.width;
1634                   mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1635                           (int32_t)largeYuv888Size.height;
1636               }
1637               break;
1638            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1639            case HAL_PIXEL_FORMAT_RAW16:
1640            case HAL_PIXEL_FORMAT_RAW10:
1641               mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1642               isRawStreamRequested = true;
1643               break;
1644            default:
1645               mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1646               mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1647               break;
1648             }
1649 
1650         }
1651 
1652         if (newStream->priv == NULL) {
1653             //New stream, construct channel
1654             switch (newStream->stream_type) {
1655             case CAMERA3_STREAM_INPUT:
1656                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1657                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1658                 break;
1659             case CAMERA3_STREAM_BIDIRECTIONAL:
1660                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1661                     GRALLOC_USAGE_HW_CAMERA_WRITE;
1662                 break;
1663             case CAMERA3_STREAM_OUTPUT:
1664                 /* For video encoding stream, set read/write rarely
1665                  * flag so that they may be set to un-cached */
1666                 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1667                     newStream->usage |=
1668                          (GRALLOC_USAGE_SW_READ_RARELY |
1669                          GRALLOC_USAGE_SW_WRITE_RARELY |
1670                          GRALLOC_USAGE_HW_CAMERA_WRITE);
1671                 else if (IS_USAGE_ZSL(newStream->usage))
1672                     CDBG("%s: ZSL usage flag skipping", __func__);
1673                 else if (newStream == zslStream
1674                         || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1675                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1676                 } else
1677                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1678                 break;
1679             default:
1680                 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
1681                 break;
1682             }
1683 
1684             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1685                     newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1686                 QCamera3ProcessingChannel *channel = NULL;
1687                 switch (newStream->format) {
1688                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1689                     if ((newStream->usage &
1690                             private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1691                             (streamList->operation_mode ==
1692                             CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1693                     ) {
1694                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1695                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
1696                                 &gCamCapability[mCameraId]->padding_info,
1697                                 this,
1698                                 newStream,
1699                                 (cam_stream_type_t)
1700                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1701                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1702                                 mMetadataChannel,
1703                                 0); //heap buffers are not required for HFR video channel
1704                         if (channel == NULL) {
1705                             ALOGE("%s: allocation of channel failed", __func__);
1706                             pthread_mutex_unlock(&mMutex);
1707                             return -ENOMEM;
1708                         }
1709                         //channel->getNumBuffers() will return 0 here so use
1710                         //MAX_INFLIGH_HFR_REQUESTS
1711                         newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
1712                         newStream->priv = channel;
1713                         ALOGI("%s: num video buffers in HFR mode: %d",
1714                                 __func__, MAX_INFLIGHT_HFR_REQUESTS);
1715                     } else {
1716                         /* Copy stream contents in HFR preview only case to create
1717                          * dummy batch channel so that sensor streaming is in
1718                          * HFR mode */
1719                         if (!m_bIsVideo && (streamList->operation_mode ==
1720                                 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
1721                             mDummyBatchStream = *newStream;
1722                         }
1723                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1724                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
1725                                 &gCamCapability[mCameraId]->padding_info,
1726                                 this,
1727                                 newStream,
1728                                 (cam_stream_type_t)
1729                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1730                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1731                                 mMetadataChannel,
1732                                 MAX_INFLIGHT_REQUESTS);
1733                         if (channel == NULL) {
1734                             ALOGE("%s: allocation of channel failed", __func__);
1735                             pthread_mutex_unlock(&mMutex);
1736                             return -ENOMEM;
1737                         }
1738                         newStream->max_buffers = channel->getNumBuffers();
1739                         newStream->priv = channel;
1740                     }
1741                     break;
1742                 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
1743                     channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
1744                             mChannelHandle,
1745                             mCameraHandle->ops, captureResultCb,
1746                             &gCamCapability[mCameraId]->padding_info,
1747                             this,
1748                             newStream,
1749                             (cam_stream_type_t)
1750                                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1751                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1752                             mMetadataChannel);
1753                     if (channel == NULL) {
1754                         ALOGE("%s: allocation of YUV channel failed", __func__);
1755                         pthread_mutex_unlock(&mMutex);
1756                         return -ENOMEM;
1757                     }
1758                     newStream->max_buffers = channel->getNumBuffers();
1759                     newStream->priv = channel;
1760                     break;
1761                 }
1762                 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1763                 case HAL_PIXEL_FORMAT_RAW16:
1764                 case HAL_PIXEL_FORMAT_RAW10:
1765                     mRawChannel = new QCamera3RawChannel(
1766                             mCameraHandle->camera_handle, mChannelHandle,
1767                             mCameraHandle->ops, captureResultCb,
1768                             &gCamCapability[mCameraId]->padding_info,
1769                             this, newStream, CAM_QCOM_FEATURE_NONE,
1770                             mMetadataChannel,
1771                             (newStream->format == HAL_PIXEL_FORMAT_RAW16));
1772                     if (mRawChannel == NULL) {
1773                         ALOGE("%s: allocation of raw channel failed", __func__);
1774                         pthread_mutex_unlock(&mMutex);
1775                         return -ENOMEM;
1776                     }
1777                     newStream->max_buffers = mRawChannel->getNumBuffers();
1778                     newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
1779                     break;
1780                 case HAL_PIXEL_FORMAT_BLOB:
1781                     // Max live snapshot inflight buffer is 1. This is to mitigate
1782                     // frame drop issues for video snapshot. The more buffers being
1783                     // allocated, the more frame drops there are.
1784                     mPictureChannel = new QCamera3PicChannel(
1785                             mCameraHandle->camera_handle, mChannelHandle,
1786                             mCameraHandle->ops, captureResultCb,
1787                             &gCamCapability[mCameraId]->padding_info, this, newStream,
1788                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1789                             m_bIs4KVideo, isZsl, mMetadataChannel,
1790                             (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
1791                     if (mPictureChannel == NULL) {
1792                         ALOGE("%s: allocation of channel failed", __func__);
1793                         pthread_mutex_unlock(&mMutex);
1794                         return -ENOMEM;
1795                     }
1796                     newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
1797                     newStream->max_buffers = mPictureChannel->getNumBuffers();
1798                     mPictureChannel->overrideYuvSize(
1799                             mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
1800                             mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
1801                     break;
1802 
1803                 default:
1804                     ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
1805                     break;
1806                 }
1807             } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
1808                 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
1809             } else {
1810                 ALOGE("%s: Error, Unknown stream type", __func__);
1811                 return -EINVAL;
1812             }
1813 
1814             for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1815                     it != mStreamInfo.end(); it++) {
1816                 if ((*it)->stream == newStream) {
1817                     (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
1818                     break;
1819                 }
1820             }
1821         } else {
1822             // Channel already exists for this stream
1823             // Do nothing for now
1824         }
1825 
1826         /* Do not add entries for input stream in metastream info
1827          * since there is no real stream associated with it
1828          */
1829         if (newStream->stream_type != CAMERA3_STREAM_INPUT)
1830             mStreamConfigInfo.num_streams++;
1831     }
1832 
1833     //RAW DUMP channel
1834     if (mEnableRawDump && isRawStreamRequested == false){
1835         cam_dimension_t rawDumpSize;
1836         rawDumpSize = getMaxRawSize(mCameraId);
1837         mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
1838                                   mChannelHandle,
1839                                   mCameraHandle->ops,
1840                                   rawDumpSize,
1841                                   &gCamCapability[mCameraId]->padding_info,
1842                                   this, CAM_QCOM_FEATURE_NONE);
1843         if (!mRawDumpChannel) {
1844             ALOGE("%s: Raw Dump channel cannot be created", __func__);
1845             pthread_mutex_unlock(&mMutex);
1846             return -ENOMEM;
1847         }
1848     }
1849 
1850 
1851     if (mAnalysisChannel) {
1852         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1853                 gCamCapability[mCameraId]->analysis_recommended_res;
1854         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1855                 CAM_STREAM_TYPE_ANALYSIS;
1856         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1857                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1858         mStreamConfigInfo.num_streams++;
1859     }
1860 
1861     if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
1862         mSupportChannel = new QCamera3SupportChannel(
1863                 mCameraHandle->camera_handle,
1864                 mChannelHandle,
1865                 mCameraHandle->ops,
1866                 &gCamCapability[mCameraId]->padding_info,
1867                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1868                 CAM_STREAM_TYPE_CALLBACK,
1869                 &QCamera3SupportChannel::kDim,
1870                 CAM_FORMAT_YUV_420_NV21,
1871                 this);
1872         if (!mSupportChannel) {
1873             ALOGE("%s: dummy channel cannot be created", __func__);
1874             pthread_mutex_unlock(&mMutex);
1875             return -ENOMEM;
1876         }
1877     }
1878 
1879     if (mSupportChannel) {
1880         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1881                 QCamera3SupportChannel::kDim;
1882         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1883                 CAM_STREAM_TYPE_CALLBACK;
1884         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1885                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1886         mStreamConfigInfo.num_streams++;
1887     }
1888 
1889     if (mRawDumpChannel) {
1890         cam_dimension_t rawSize;
1891         rawSize = getMaxRawSize(mCameraId);
1892         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1893                 rawSize;
1894         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1895                 CAM_STREAM_TYPE_RAW;
1896         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1897                 CAM_QCOM_FEATURE_NONE;
1898         mStreamConfigInfo.num_streams++;
1899     }
1900     /* In HFR mode, if video stream is not added, create a dummy channel so that
1901      * ISP can create a batch mode even for preview only case. This channel is
1902      * never 'start'ed (no stream-on), it is only 'initialized'  */
1903     if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1904             !m_bIsVideo) {
1905         mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1906                 mChannelHandle,
1907                 mCameraHandle->ops, captureResultCb,
1908                 &gCamCapability[mCameraId]->padding_info,
1909                 this,
1910                 &mDummyBatchStream,
1911                 CAM_STREAM_TYPE_VIDEO,
1912                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1913                 mMetadataChannel);
1914         if (NULL == mDummyBatchChannel) {
1915             ALOGE("%s: creation of mDummyBatchChannel failed."
1916                     "Preview will use non-hfr sensor mode ", __func__);
1917         }
1918     }
1919     if (mDummyBatchChannel) {
1920         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1921                 mDummyBatchStream.width;
1922         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1923                 mDummyBatchStream.height;
1924         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1925                 CAM_STREAM_TYPE_VIDEO;
1926         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1927                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1928         mStreamConfigInfo.num_streams++;
1929     }
1930 
1931     mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
1932     mStreamConfigInfo.buffer_info.max_buffers =
1933             m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
1934 
1935     /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
1936     for (pendingRequestIterator i = mPendingRequestsList.begin();
1937             i != mPendingRequestsList.end();) {
1938         i = erasePendingRequest(i);
1939     }
1940     mPendingFrameDropList.clear();
1941     // Initialize/Reset the pending buffers list
1942     mPendingBuffersMap.num_buffers = 0;
1943     mPendingBuffersMap.mPendingBufferList.clear();
1944     mPendingReprocessResultList.clear();
1945 
1946     mFirstRequest = true;
1947     mCurJpegMeta.clear();
1948     //Get min frame duration for this streams configuration
1949     deriveMinFrameDuration();
1950 
1951     /* Turn on video hint only if video stream is configured */
1952 
1953     pthread_mutex_unlock(&mMutex);
1954 
1955     return rc;
1956 }
1957 
1958 /*===========================================================================
1959  * FUNCTION   : validateCaptureRequest
1960  *
1961  * DESCRIPTION: validate a capture request from camera service
1962  *
1963  * PARAMETERS :
1964  *   @request : request from framework to process
1965  *
1966  * RETURN     :
1967  *
1968  *==========================================================================*/
validateCaptureRequest(camera3_capture_request_t * request)1969 int QCamera3HardwareInterface::validateCaptureRequest(
1970                     camera3_capture_request_t *request)
1971 {
1972     ssize_t idx = 0;
1973     const camera3_stream_buffer_t *b;
1974     CameraMetadata meta;
1975 
1976     /* Sanity check the request */
1977     if (request == NULL) {
1978         ALOGE("%s: NULL capture request", __func__);
1979         return BAD_VALUE;
1980     }
1981 
1982     if (request->settings == NULL && mFirstRequest) {
1983         /*settings cannot be null for the first request*/
1984         return BAD_VALUE;
1985     }
1986 
1987     uint32_t frameNumber = request->frame_number;
1988     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
1989         ALOGE("%s: Request %d: No output buffers provided!",
1990                 __FUNCTION__, frameNumber);
1991         return BAD_VALUE;
1992     }
1993     if (request->num_output_buffers >= MAX_NUM_STREAMS) {
1994         ALOGE("%s: Number of buffers %d equals or is greater than maximum number of streams!",
1995                 __func__, request->num_output_buffers, MAX_NUM_STREAMS);
1996         return BAD_VALUE;
1997     }
1998     if (request->input_buffer != NULL) {
1999         b = request->input_buffer;
2000         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2001             ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
2002                     __func__, frameNumber, (long)idx);
2003             return BAD_VALUE;
2004         }
2005         if (b->release_fence != -1) {
2006             ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
2007                     __func__, frameNumber, (long)idx);
2008             return BAD_VALUE;
2009         }
2010         if (b->buffer == NULL) {
2011             ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
2012                     __func__, frameNumber, (long)idx);
2013             return BAD_VALUE;
2014         }
2015     }
2016 
2017     // Validate all buffers
2018     b = request->output_buffers;
2019     do {
2020         QCamera3ProcessingChannel *channel =
2021                 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2022         if (channel == NULL) {
2023             ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
2024                     __func__, frameNumber, (long)idx);
2025             return BAD_VALUE;
2026         }
2027         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2028             ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
2029                     __func__, frameNumber, (long)idx);
2030             return BAD_VALUE;
2031         }
2032         if (b->release_fence != -1) {
2033             ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
2034                     __func__, frameNumber, (long)idx);
2035             return BAD_VALUE;
2036         }
2037         if (b->buffer == NULL) {
2038             ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
2039                     __func__, frameNumber, (long)idx);
2040             return BAD_VALUE;
2041         }
2042         if (*(b->buffer) == NULL) {
2043             ALOGE("%s: Request %d: Buffer %ld: NULL private handle!",
2044                     __func__, frameNumber, (long)idx);
2045             return BAD_VALUE;
2046         }
2047         idx++;
2048         b = request->output_buffers + idx;
2049     } while (idx < (ssize_t)request->num_output_buffers);
2050 
2051     return NO_ERROR;
2052 }
2053 
2054 /*===========================================================================
2055  * FUNCTION   : deriveMinFrameDuration
2056  *
2057  * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
2058  *              on currently configured streams.
2059  *
2060  * PARAMETERS : NONE
2061  *
2062  * RETURN     : NONE
2063  *
2064  *==========================================================================*/
deriveMinFrameDuration()2065 void QCamera3HardwareInterface::deriveMinFrameDuration()
2066 {
2067     int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2068 
2069     maxJpegDim = 0;
2070     maxProcessedDim = 0;
2071     maxRawDim = 0;
2072 
2073     // Figure out maximum jpeg, processed, and raw dimensions
2074     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2075         it != mStreamInfo.end(); it++) {
2076 
2077         // Input stream doesn't have valid stream_type
2078         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2079             continue;
2080 
2081         int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2082         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2083             if (dimension > maxJpegDim)
2084                 maxJpegDim = dimension;
2085         } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2086                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2087                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2088             if (dimension > maxRawDim)
2089                 maxRawDim = dimension;
2090         } else {
2091             if (dimension > maxProcessedDim)
2092                 maxProcessedDim = dimension;
2093         }
2094     }
2095 
2096     size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2097             MAX_SIZES_CNT);
2098 
2099     //Assume all jpeg dimensions are in processed dimensions.
2100     if (maxJpegDim > maxProcessedDim)
2101         maxProcessedDim = maxJpegDim;
2102     //Find the smallest raw dimension that is greater or equal to jpeg dimension
2103     if (maxProcessedDim > maxRawDim) {
2104         maxRawDim = INT32_MAX;
2105 
2106         for (size_t i = 0; i < count; i++) {
2107             int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2108                     gCamCapability[mCameraId]->raw_dim[i].height;
2109             if (dimension >= maxProcessedDim && dimension < maxRawDim)
2110                 maxRawDim = dimension;
2111         }
2112     }
2113 
2114     //Find minimum durations for processed, jpeg, and raw
2115     for (size_t i = 0; i < count; i++) {
2116         if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2117                 gCamCapability[mCameraId]->raw_dim[i].height) {
2118             mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2119             break;
2120         }
2121     }
2122     count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2123     for (size_t i = 0; i < count; i++) {
2124         if (maxProcessedDim ==
2125                 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2126                 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2127             mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2128             mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2129             break;
2130         }
2131     }
2132 }
2133 
2134 /*===========================================================================
2135  * FUNCTION   : getMinFrameDuration
2136  *
2137  * DESCRIPTION: get minimum frame duration based on the current minimum frame durations
2138  *              and current request configuration.
2139  *
2140  * PARAMETERS : @request: request sent by the frameworks
2141  *
2142  * RETURN     : min frame duration for a particular request
2143  *
2144  *==========================================================================*/
getMinFrameDuration(const camera3_capture_request_t * request)2145 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2146 {
2147     bool hasJpegStream = false;
2148     bool hasRawStream = false;
2149     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2150         const camera3_stream_t *stream = request->output_buffers[i].stream;
2151         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2152             hasJpegStream = true;
2153         else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2154                 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2155                 stream->format == HAL_PIXEL_FORMAT_RAW16)
2156             hasRawStream = true;
2157     }
2158 
2159     if (!hasJpegStream)
2160         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2161     else
2162         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2163 }
2164 
2165 /*===========================================================================
2166  * FUNCTION   : handlePendingReprocResults
2167  *
2168  * DESCRIPTION: check and notify on any pending reprocess results
2169  *
2170  * PARAMETERS :
2171  *   @frame_number   : Pending request frame number
2172  *
2173  * RETURN     : int32_t type of status
2174  *              NO_ERROR  -- success
2175  *              none-zero failure code
2176  *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Scan the deferred reprocess results for one matching this frame number.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notification that was held back until now.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            CDBG("%s: Delayed reprocess notify %d", __func__,
                    frame_number);

            // Find the matching pending request so its settings and input
            // buffer can be attached to the capture result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    CDBG("%s: Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!", __func__,
                            k->frame_number);

                    // Assemble and send the complete capture result: the
                    // saved output buffer plus the request's input buffer
                    // and settings, marked as the final partial result.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // Request fully answered; remove it from the pending list.
                    erasePendingRequest(k);
                    break;
                }
            }
            // The deferred result has been consumed; drop it and stop —
            // erase invalidates j, but we break out immediately.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    // Always reports success, even when no matching entry was found.
    return NO_ERROR;
}
2215 
2216 /*===========================================================================
2217  * FUNCTION   : handleBatchMetadata
2218  *
2219  * DESCRIPTION: Handles metadata buffer callback in batch mode
2220  *
2221  * PARAMETERS : @metadata_buf: metadata buffer
2222  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2223  *                 the meta buf in this method
2224  *
2225  * RETURN     :
2226  *
2227  *==========================================================================*/
handleBatchMetadata(mm_camera_super_buf_t * metadata_buf,bool free_and_bufdone_meta_buf)2228 void QCamera3HardwareInterface::handleBatchMetadata(
2229         mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2230 {
2231     ATRACE_CALL();
2232 
2233     if (NULL == metadata_buf) {
2234         ALOGE("%s: metadata_buf is NULL", __func__);
2235         return;
2236     }
2237     /* In batch mode, the metdata will contain the frame number and timestamp of
2238      * the last frame in the batch. Eg: a batch containing buffers from request
2239      * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
2240      * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
2241      * multiple process_capture_results */
2242     metadata_buffer_t *metadata =
2243             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2244     int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2245     uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
2246     uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
2247     uint32_t frame_number = 0, urgent_frame_number = 0;
2248     int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2249     bool invalid_metadata = false;
2250     size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2251     size_t loopCount = 1;
2252 
2253     int32_t *p_frame_number_valid =
2254             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2255     uint32_t *p_frame_number =
2256             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2257     int64_t *p_capture_time =
2258             POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2259     int32_t *p_urgent_frame_number_valid =
2260             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2261     uint32_t *p_urgent_frame_number =
2262             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2263 
2264     if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
2265             (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
2266             (NULL == p_urgent_frame_number)) {
2267         ALOGE("%s: Invalid metadata", __func__);
2268         invalid_metadata = true;
2269     } else {
2270         frame_number_valid = *p_frame_number_valid;
2271         last_frame_number = *p_frame_number;
2272         last_frame_capture_time = *p_capture_time;
2273         urgent_frame_number_valid = *p_urgent_frame_number_valid;
2274         last_urgent_frame_number = *p_urgent_frame_number;
2275     }
2276 
2277     /* In batchmode, when no video buffers are requested, set_parms are sent
2278      * for every capture_request. The difference between consecutive urgent
2279      * frame numbers and frame numbers should be used to interpolate the
2280      * corresponding frame numbers and time stamps */
2281     pthread_mutex_lock(&mMutex);
2282     if (urgent_frame_number_valid) {
2283         first_urgent_frame_number =
2284                 mPendingBatchMap.valueFor(last_urgent_frame_number);
2285         urgentFrameNumDiff = last_urgent_frame_number + 1 -
2286                 first_urgent_frame_number;
2287 
2288         CDBG("%s: urgent_frm: valid: %d frm_num: %d - %d",
2289                 __func__, urgent_frame_number_valid,
2290                 first_urgent_frame_number, last_urgent_frame_number);
2291     }
2292 
2293     if (frame_number_valid) {
2294         first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
2295         frameNumDiff = last_frame_number + 1 -
2296                 first_frame_number;
2297         mPendingBatchMap.removeItem(last_frame_number);
2298 
2299         CDBG("%s:        frm: valid: %d frm_num: %d - %d",
2300                 __func__, frame_number_valid,
2301                 first_frame_number, last_frame_number);
2302 
2303     }
2304     pthread_mutex_unlock(&mMutex);
2305 
2306     if (urgent_frame_number_valid || frame_number_valid) {
2307         loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
2308         if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
2309             ALOGE("%s: urgentFrameNumDiff: %d urgentFrameNum: %d",
2310                     __func__, urgentFrameNumDiff, last_urgent_frame_number);
2311         if (frameNumDiff > MAX_HFR_BATCH_SIZE)
2312             ALOGE("%s: frameNumDiff: %d frameNum: %d",
2313                     __func__, frameNumDiff, last_frame_number);
2314     }
2315 
2316     for (size_t i = 0; i < loopCount; i++) {
2317         /* handleMetadataWithLock is called even for invalid_metadata for
2318          * pipeline depth calculation */
2319         if (!invalid_metadata) {
2320             /* Infer frame number. Batch metadata contains frame number of the
2321              * last frame */
2322             if (urgent_frame_number_valid) {
2323                 if (i < urgentFrameNumDiff) {
2324                     urgent_frame_number =
2325                             first_urgent_frame_number + i;
2326                     CDBG("%s: inferred urgent frame_number: %d",
2327                             __func__, urgent_frame_number);
2328                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2329                             CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
2330                 } else {
2331                     /* This is to handle when urgentFrameNumDiff < frameNumDiff */
2332                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2333                             CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
2334                 }
2335             }
2336 
2337             /* Infer frame number. Batch metadata contains frame number of the
2338              * last frame */
2339             if (frame_number_valid) {
2340                 if (i < frameNumDiff) {
2341                     frame_number = first_frame_number + i;
2342                     CDBG("%s: inferred frame_number: %d", __func__, frame_number);
2343                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2344                             CAM_INTF_META_FRAME_NUMBER, frame_number);
2345                 } else {
2346                     /* This is to handle when urgentFrameNumDiff > frameNumDiff */
2347                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2348                              CAM_INTF_META_FRAME_NUMBER_VALID, 0);
2349                 }
2350             }
2351 
2352             if (last_frame_capture_time) {
2353                 //Infer timestamp
2354                 first_frame_capture_time = last_frame_capture_time -
2355                         (((loopCount - 1) * NSEC_PER_SEC) / mHFRVideoFps);
2356                 capture_time =
2357                         first_frame_capture_time + (i * NSEC_PER_SEC / mHFRVideoFps);
2358                 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2359                         CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
2360                 CDBG("%s: batch capture_time: %lld, capture_time: %lld",
2361                         __func__, last_frame_capture_time, capture_time);
2362             }
2363         }
2364         pthread_mutex_lock(&mMutex);
2365         handleMetadataWithLock(metadata_buf,
2366                 false /* free_and_bufdone_meta_buf */);
2367         pthread_mutex_unlock(&mMutex);
2368     }
2369 
2370 done_batch_metadata:
2371     /* BufDone metadata buffer */
2372     if (free_and_bufdone_meta_buf) {
2373         mMetadataChannel->bufDone(metadata_buf);
2374         free(metadata_buf);
2375     }
2376 }
2377 
2378 /*===========================================================================
2379  * FUNCTION   : handleMetadataWithLock
2380  *
2381  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2382  *
2383  * PARAMETERS : @metadata_buf: metadata buffer
2384  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2385  *                 the meta buf in this method
2386  *
2387  * RETURN     :
2388  *
2389  *==========================================================================*/
handleMetadataWithLock(mm_camera_super_buf_t * metadata_buf,bool free_and_bufdone_meta_buf)2390 void QCamera3HardwareInterface::handleMetadataWithLock(
2391     mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2392 {
2393     ATRACE_CALL();
2394 
2395     metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2396     int32_t frame_number_valid, urgent_frame_number_valid;
2397     uint32_t frame_number, urgent_frame_number;
2398     int64_t capture_time;
2399 
2400     int32_t *p_frame_number_valid =
2401             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2402     uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2403     int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2404     int32_t *p_urgent_frame_number_valid =
2405             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2406     uint32_t *p_urgent_frame_number =
2407             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2408     IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2409             metadata) {
2410         CDBG("%s: Dropped frame info for frame_number_valid %d, frame_number %d",
2411                 __func__, *p_frame_number_valid, *p_frame_number);
2412     }
2413 
2414     if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2415             (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2416         ALOGE("%s: Invalid metadata", __func__);
2417         if (free_and_bufdone_meta_buf) {
2418             mMetadataChannel->bufDone(metadata_buf);
2419             free(metadata_buf);
2420         }
2421         goto done_metadata;
2422     } else {
2423         frame_number_valid = *p_frame_number_valid;
2424         frame_number = *p_frame_number;
2425         capture_time = *p_capture_time;
2426         urgent_frame_number_valid = *p_urgent_frame_number_valid;
2427         urgent_frame_number = *p_urgent_frame_number;
2428     }
2429     //Partial result on process_capture_result for timestamp
2430     if (urgent_frame_number_valid) {
2431         CDBG("%s: valid urgent frame_number = %u, capture_time = %lld",
2432           __func__, urgent_frame_number, capture_time);
2433 
2434         //Recieved an urgent Frame Number, handle it
2435         //using partial results
2436         for (pendingRequestIterator i =
2437                 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
2438             CDBG("%s: Iterator Frame = %d urgent frame = %d",
2439                 __func__, i->frame_number, urgent_frame_number);
2440 
2441             if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
2442                 (i->partial_result_cnt == 0)) {
2443                 ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
2444                     __func__, i->frame_number);
2445             }
2446 
2447             if (i->frame_number == urgent_frame_number &&
2448                      i->bUrgentReceived == 0) {
2449 
2450                 camera3_capture_result_t result;
2451                 memset(&result, 0, sizeof(camera3_capture_result_t));
2452 
2453                 i->partial_result_cnt++;
2454                 i->bUrgentReceived = 1;
2455                 // Extract 3A metadata
2456                 result.result =
2457                     translateCbUrgentMetadataToResultMetadata(metadata);
2458                 // Populate metadata result
2459                 result.frame_number = urgent_frame_number;
2460                 result.num_output_buffers = 0;
2461                 result.output_buffers = NULL;
2462                 result.partial_result = i->partial_result_cnt;
2463 
2464                 mCallbackOps->process_capture_result(mCallbackOps, &result);
2465                 CDBG("%s: urgent frame_number = %u, capture_time = %lld",
2466                      __func__, result.frame_number, capture_time);
2467                 free_camera_metadata((camera_metadata_t *)result.result);
2468                 break;
2469             }
2470         }
2471     }
2472 
2473     if (!frame_number_valid) {
2474         CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
2475         if (free_and_bufdone_meta_buf) {
2476             mMetadataChannel->bufDone(metadata_buf);
2477             free(metadata_buf);
2478         }
2479         goto done_metadata;
2480     }
2481     CDBG_HIGH("%s: valid frame_number = %u, capture_time = %lld", __func__,
2482             frame_number, capture_time);
2483 
2484     for (pendingRequestIterator i = mPendingRequestsList.begin();
2485             i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
2486         // Flush out all entries with less or equal frame numbers.
2487 
2488         camera3_capture_result_t result;
2489         memset(&result, 0, sizeof(camera3_capture_result_t));
2490 
2491         CDBG("%s: frame_number in the list is %u", __func__, i->frame_number);
2492         i->partial_result_cnt++;
2493         result.partial_result = i->partial_result_cnt;
2494 
2495         // Check whether any stream buffer corresponding to this is dropped or not
2496         // If dropped, then send the ERROR_BUFFER for the corresponding stream
2497         // The API does not expect a blob buffer to be dropped
2498         if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
2499             /* Clear notify_msg structure */
2500             camera3_notify_msg_t notify_msg;
2501             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2502             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2503                     j != i->buffers.end(); j++) {
2504                if (j->stream->format != HAL_PIXEL_FORMAT_BLOB) {
2505                    QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
2506                    uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2507                    for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
2508                        if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
2509                            // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
2510                            ALOGW("%s: Start of reporting error frame#=%u, streamID=%u",
2511                                    __func__, i->frame_number, streamID);
2512                            notify_msg.type = CAMERA3_MSG_ERROR;
2513                            notify_msg.message.error.frame_number = i->frame_number;
2514                            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
2515                            notify_msg.message.error.error_stream = j->stream;
2516                            mCallbackOps->notify(mCallbackOps, &notify_msg);
2517                            ALOGW("%s: End of reporting error frame#=%u, streamID=%u",
2518                                   __func__, i->frame_number, streamID);
2519                            PendingFrameDropInfo PendingFrameDrop;
2520                            PendingFrameDrop.frame_number=i->frame_number;
2521                            PendingFrameDrop.stream_ID = streamID;
2522                            // Add the Frame drop info to mPendingFrameDropList
2523                            mPendingFrameDropList.push_back(PendingFrameDrop);
2524                       }
2525                    }
2526                } else {
2527                    ALOGE("%s: JPEG buffer dropped for frame number %d",
2528                            __func__, i->frame_number);
2529                }
2530             }
2531         }
2532 
2533         // Send empty metadata with already filled buffers for dropped metadata
2534         // and send valid metadata with already filled buffers for current metadata
2535         /* we could hit this case when we either
2536          * 1. have a pending reprocess request or
2537          * 2. miss a metadata buffer callback */
2538         if (i->frame_number < frame_number) {
2539             if (i->input_buffer) {
2540                 /* this will be handled in handleInputBufferWithLock */
2541                 i++;
2542                 continue;
2543             } else {
2544                 ALOGE("%s: Fatal: Missing metadata buffer for frame number %d", __func__, i->frame_number);
2545                 if (free_and_bufdone_meta_buf) {
2546                     mMetadataChannel->bufDone(metadata_buf);
2547                     free(metadata_buf);
2548                 }
2549                 camera3_notify_msg_t notify_msg;
2550                 memset(&notify_msg, 0, sizeof(notify_msg));
2551                 notify_msg.type = CAMERA3_MSG_ERROR;
2552                 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
2553                 mCallbackOps->notify(mCallbackOps, &notify_msg);
2554                 goto done_metadata;
2555             }
2556         } else {
2557             mPendingLiveRequest--;
2558             /* Clear notify_msg structure */
2559             camera3_notify_msg_t notify_msg;
2560             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2561 
2562             // Send shutter notify to frameworks
2563             notify_msg.type = CAMERA3_MSG_SHUTTER;
2564             notify_msg.message.shutter.frame_number = i->frame_number;
2565             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
2566             mCallbackOps->notify(mCallbackOps, &notify_msg);
2567 
2568             i->timestamp = capture_time;
2569 
2570             // Find channel requiring metadata, meaning internal offline postprocess
2571             // is needed.
2572             //TODO: for now, we don't support two streams requiring metadata at the same time.
2573             // (because we are not making copies, and metadata buffer is not reference counted.
2574             bool internalPproc = false;
2575             for (pendingBufferIterator iter = i->buffers.begin();
2576                     iter != i->buffers.end(); iter++) {
2577                 if (iter->need_metadata) {
2578                     internalPproc = true;
2579                     QCamera3ProcessingChannel *channel =
2580                             (QCamera3ProcessingChannel *)iter->stream->priv;
2581                     channel->queueReprocMetadata(metadata_buf);
2582                     break;
2583                 }
2584             }
2585 
2586             result.result = translateFromHalMetadata(metadata,
2587                     i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
2588                     i->capture_intent, internalPproc);
2589 
2590             saveExifParams(metadata);
2591 
2592             if (i->blob_request) {
2593                 {
2594                     //Dump tuning metadata if enabled and available
2595                     char prop[PROPERTY_VALUE_MAX];
2596                     memset(prop, 0, sizeof(prop));
2597                     property_get("persist.camera.dumpmetadata", prop, "0");
2598                     int32_t enabled = atoi(prop);
2599                     if (enabled && metadata->is_tuning_params_valid) {
2600                         dumpMetadataToFile(metadata->tuning_params,
2601                                mMetaFrameCount,
2602                                enabled,
2603                                "Snapshot",
2604                                frame_number);
2605                     }
2606                 }
2607             }
2608 
2609             if (!internalPproc) {
2610                 CDBG("%s: couldn't find need_metadata for this metadata", __func__);
2611                 // Return metadata buffer
2612                 if (free_and_bufdone_meta_buf) {
2613                     mMetadataChannel->bufDone(metadata_buf);
2614                     free(metadata_buf);
2615                 }
2616             }
2617         }
2618         if (!result.result) {
2619             ALOGE("%s: metadata is NULL", __func__);
2620         }
2621         result.frame_number = i->frame_number;
2622         result.input_buffer = i->input_buffer;
2623         result.num_output_buffers = 0;
2624         result.output_buffers = NULL;
2625         for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2626                     j != i->buffers.end(); j++) {
2627             if (j->buffer) {
2628                 result.num_output_buffers++;
2629             }
2630         }
2631 
2632         if (result.num_output_buffers > 0) {
2633             camera3_stream_buffer_t *result_buffers =
2634                 new camera3_stream_buffer_t[result.num_output_buffers];
2635             if (!result_buffers) {
2636                 ALOGE("%s: Fatal error: out of memory", __func__);
2637             }
2638             size_t result_buffers_idx = 0;
2639             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2640                     j != i->buffers.end(); j++) {
2641                 if (j->buffer) {
2642                     for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
2643                             m != mPendingFrameDropList.end(); m++) {
2644                         QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
2645                         uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2646                         if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
2647                             j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
2648                             ALOGW("%s: Stream STATUS_ERROR frame_number=%u, streamID=%u",
2649                                   __func__, frame_number, streamID);
2650                             m = mPendingFrameDropList.erase(m);
2651                             break;
2652                         }
2653                     }
2654 
2655                     for (List<PendingBufferInfo>::iterator k =
2656                       mPendingBuffersMap.mPendingBufferList.begin();
2657                       k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
2658                       if (k->buffer == j->buffer->buffer) {
2659                         CDBG("%s: Found buffer %p in pending buffer List "
2660                               "for frame %u, Take it out!!", __func__,
2661                                k->buffer, k->frame_number);
2662                         mPendingBuffersMap.num_buffers--;
2663                         k = mPendingBuffersMap.mPendingBufferList.erase(k);
2664                         break;
2665                       }
2666                     }
2667 
2668                     result_buffers[result_buffers_idx++] = *(j->buffer);
2669                     free(j->buffer);
2670                     j->buffer = NULL;
2671                 }
2672             }
2673             result.output_buffers = result_buffers;
2674             mCallbackOps->process_capture_result(mCallbackOps, &result);
2675             CDBG("%s %d: meta frame_number = %u, capture_time = %lld",
2676                     __func__, __LINE__, result.frame_number, i->timestamp);
2677             free_camera_metadata((camera_metadata_t *)result.result);
2678             delete[] result_buffers;
2679         } else {
2680             mCallbackOps->process_capture_result(mCallbackOps, &result);
2681             CDBG("%s %d: meta frame_number = %u, capture_time = %lld",
2682                         __func__, __LINE__, result.frame_number, i->timestamp);
2683             free_camera_metadata((camera_metadata_t *)result.result);
2684         }
2685 
2686         i = erasePendingRequest(i);
2687 
2688         if (!mPendingReprocessResultList.empty()) {
2689             handlePendingReprocResults(frame_number + 1);
2690         }
2691     }
2692 
2693 done_metadata:
2694     for (pendingRequestIterator i = mPendingRequestsList.begin();
2695             i != mPendingRequestsList.end() ;i++) {
2696         i->pipeline_depth++;
2697     }
2698     CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
2699     unblockRequestIfNecessary();
2700 
2701 }
2702 
2703 /*===========================================================================
2704  * FUNCTION   : hdrPlusPerfLock
2705  *
2706  * DESCRIPTION: perf lock for HDR+ using custom intent
2707  *
2708  * PARAMETERS : @metadata_buf: Metadata super_buf pointer
2709  *
2710  * RETURN     : None
2711  *
2712  *==========================================================================*/
hdrPlusPerfLock(mm_camera_super_buf_t * metadata_buf)2713 void QCamera3HardwareInterface::hdrPlusPerfLock(
2714         mm_camera_super_buf_t *metadata_buf)
2715 {
2716     if (NULL == metadata_buf) {
2717         ALOGE("%s: metadata_buf is NULL", __func__);
2718         return;
2719     }
2720     metadata_buffer_t *metadata =
2721             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2722     int32_t *p_frame_number_valid =
2723             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2724     uint32_t *p_frame_number =
2725             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2726 
2727     //acquire perf lock for 5 sec after the last HDR frame is captured
2728     if (*p_frame_number_valid) {
2729         if (mLastCustIntentFrmNum == (int32_t)*p_frame_number) {
2730             m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
2731         }
2732     }
2733 
2734     //release lock after perf lock timer is expired. If lock is already released,
2735     //isTimerReset returns false
2736     if (m_perfLock.isTimerReset()) {
2737         mLastCustIntentFrmNum = -1;
2738         m_perfLock.lock_rel_timed();
2739     }
2740 }
2741 
2742 /*===========================================================================
2743  * FUNCTION   : handleInputBufferWithLock
2744  *
2745  * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
2746  *
2747  * PARAMETERS : @frame_number: frame number of the input buffer
2748  *
2749  * RETURN     :
2750  *
2751  *==========================================================================*/
handleInputBufferWithLock(uint32_t frame_number)2752 void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
2753 {
2754     ATRACE_CALL();
2755     pendingRequestIterator i = mPendingRequestsList.begin();
2756     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
2757         i++;
2758     }
2759     if (i != mPendingRequestsList.end() && i->input_buffer) {
2760         //found the right request
2761         if (!i->shutter_notified) {
2762             CameraMetadata settings;
2763             camera3_notify_msg_t notify_msg;
2764             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2765             nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
2766             if(i->settings) {
2767                 settings = i->settings;
2768                 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
2769                     capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
2770                 } else {
2771                     ALOGE("%s: No timestamp in input settings! Using current one.",
2772                             __func__);
2773                 }
2774             } else {
2775                 ALOGE("%s: Input settings missing!", __func__);
2776             }
2777 
2778             notify_msg.type = CAMERA3_MSG_SHUTTER;
2779             notify_msg.message.shutter.frame_number = frame_number;
2780             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
2781             mCallbackOps->notify(mCallbackOps, &notify_msg);
2782             i->shutter_notified = true;
2783             CDBG("%s: Input request metadata notify frame_number = %u, capture_time = %llu",
2784                        __func__, i->frame_number, notify_msg.message.shutter.timestamp);
2785         }
2786 
2787         if (i->input_buffer->release_fence != -1) {
2788            int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
2789            close(i->input_buffer->release_fence);
2790            if (rc != OK) {
2791                ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
2792            }
2793         }
2794 
2795         camera3_capture_result result;
2796         memset(&result, 0, sizeof(camera3_capture_result));
2797         result.frame_number = frame_number;
2798         result.result = i->settings;
2799         result.input_buffer = i->input_buffer;
2800         result.partial_result = PARTIAL_RESULT_COUNT;
2801 
2802         mCallbackOps->process_capture_result(mCallbackOps, &result);
2803         CDBG("%s: Input request metadata and input buffer frame_number = %u",
2804                        __func__, i->frame_number);
2805         i = erasePendingRequest(i);
2806     } else {
2807         ALOGE("%s: Could not find input request for frame number %d", __func__, frame_number);
2808     }
2809 }
2810 
2811 /*===========================================================================
2812  * FUNCTION   : handleBufferWithLock
2813  *
2814  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
2815  *
2816  * PARAMETERS : @buffer: image buffer for the callback
2817  *              @frame_number: frame number of the image buffer
2818  *
2819  * RETURN     :
2820  *
2821  *==========================================================================*/
handleBufferWithLock(camera3_stream_buffer_t * buffer,uint32_t frame_number)2822 void QCamera3HardwareInterface::handleBufferWithLock(
2823     camera3_stream_buffer_t *buffer, uint32_t frame_number)
2824 {
2825     ATRACE_CALL();
2826     // If the frame number doesn't exist in the pending request list,
2827     // directly send the buffer to the frameworks, and update pending buffers map
2828     // Otherwise, book-keep the buffer.
2829     pendingRequestIterator i = mPendingRequestsList.begin();
2830     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
2831         i++;
2832     }
2833     if (i == mPendingRequestsList.end()) {
2834         // Verify all pending requests frame_numbers are greater
2835         for (pendingRequestIterator j = mPendingRequestsList.begin();
2836                 j != mPendingRequestsList.end(); j++) {
2837             if ((j->frame_number < frame_number) && !(j->input_buffer)) {
2838                 ALOGE("%s: Error: pending live frame number %d is smaller than %d",
2839                         __func__, j->frame_number, frame_number);
2840             }
2841         }
2842         camera3_capture_result_t result;
2843         memset(&result, 0, sizeof(camera3_capture_result_t));
2844         result.result = NULL;
2845         result.frame_number = frame_number;
2846         result.num_output_buffers = 1;
2847         result.partial_result = 0;
2848         for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
2849                 m != mPendingFrameDropList.end(); m++) {
2850             QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
2851             uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2852             if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
2853                 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
2854                 CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
2855                         __func__, frame_number, streamID);
2856                 m = mPendingFrameDropList.erase(m);
2857                 break;
2858             }
2859         }
2860         result.output_buffers = buffer;
2861         CDBG_HIGH("%s: result frame_number = %d, buffer = %p",
2862                 __func__, frame_number, buffer->buffer);
2863 
2864         for (List<PendingBufferInfo>::iterator k =
2865                 mPendingBuffersMap.mPendingBufferList.begin();
2866                 k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
2867             if (k->buffer == buffer->buffer) {
2868                 CDBG("%s: Found Frame buffer, take it out from list",
2869                         __func__);
2870 
2871                 mPendingBuffersMap.num_buffers--;
2872                 k = mPendingBuffersMap.mPendingBufferList.erase(k);
2873                 break;
2874             }
2875         }
2876         CDBG("%s: mPendingBuffersMap.num_buffers = %d",
2877             __func__, mPendingBuffersMap.num_buffers);
2878 
2879         mCallbackOps->process_capture_result(mCallbackOps, &result);
2880     } else {
2881         if (i->input_buffer) {
2882             CameraMetadata settings;
2883             camera3_notify_msg_t notify_msg;
2884             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2885             nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
2886             if(i->settings) {
2887                 settings = i->settings;
2888                 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
2889                     capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
2890                 } else {
2891                     ALOGE("%s: No timestamp in input settings! Using current one.",
2892                             __func__);
2893                 }
2894             } else {
2895                 ALOGE("%s: Input settings missing!", __func__);
2896             }
2897 
2898             notify_msg.type = CAMERA3_MSG_SHUTTER;
2899             notify_msg.message.shutter.frame_number = frame_number;
2900             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
2901 
2902             if (i->input_buffer->release_fence != -1) {
2903                int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
2904                close(i->input_buffer->release_fence);
2905                if (rc != OK) {
2906                ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
2907                }
2908             }
2909 
2910             for (List<PendingBufferInfo>::iterator k =
2911                     mPendingBuffersMap.mPendingBufferList.begin();
2912                     k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
2913                 if (k->buffer == buffer->buffer) {
2914                     CDBG("%s: Found Frame buffer, take it out from list",
2915                             __func__);
2916 
2917                     mPendingBuffersMap.num_buffers--;
2918                     k = mPendingBuffersMap.mPendingBufferList.erase(k);
2919                     break;
2920                 }
2921             }
2922             CDBG("%s: mPendingBuffersMap.num_buffers = %d",
2923                 __func__, mPendingBuffersMap.num_buffers);
2924 
2925             bool notifyNow = true;
2926             for (pendingRequestIterator j = mPendingRequestsList.begin();
2927                     j != mPendingRequestsList.end(); j++) {
2928                 if (j->frame_number < frame_number) {
2929                     notifyNow = false;
2930                     break;
2931                 }
2932             }
2933 
2934             if (notifyNow) {
2935                 camera3_capture_result result;
2936                 memset(&result, 0, sizeof(camera3_capture_result));
2937                 result.frame_number = frame_number;
2938                 result.result = i->settings;
2939                 result.input_buffer = i->input_buffer;
2940                 result.num_output_buffers = 1;
2941                 result.output_buffers = buffer;
2942                 result.partial_result = PARTIAL_RESULT_COUNT;
2943 
2944                 mCallbackOps->notify(mCallbackOps, &notify_msg);
2945                 mCallbackOps->process_capture_result(mCallbackOps, &result);
2946                 CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
2947                 i = erasePendingRequest(i);
2948             } else {
2949                 // Cache reprocess result for later
2950                 PendingReprocessResult pendingResult;
2951                 memset(&pendingResult, 0, sizeof(PendingReprocessResult));
2952                 pendingResult.notify_msg = notify_msg;
2953                 pendingResult.buffer = *buffer;
2954                 pendingResult.frame_number = frame_number;
2955                 mPendingReprocessResultList.push_back(pendingResult);
2956                 CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
2957             }
2958         } else {
2959             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2960                 j != i->buffers.end(); j++) {
2961                 if (j->stream == buffer->stream) {
2962                     if (j->buffer != NULL) {
2963                         ALOGE("%s: Error: buffer is already set", __func__);
2964                     } else {
2965                         j->buffer = (camera3_stream_buffer_t *)malloc(
2966                             sizeof(camera3_stream_buffer_t));
2967                         *(j->buffer) = *buffer;
2968                         CDBG_HIGH("%s: cache buffer %p at result frame_number %d",
2969                             __func__, buffer, frame_number);
2970                     }
2971                 }
2972             }
2973         }
2974     }
2975 }
2976 
2977 /*===========================================================================
2978  * FUNCTION   : unblockRequestIfNecessary
2979  *
2980  * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
2981  *              that mMutex is held when this function is called.
2982  *
2983  * PARAMETERS :
2984  *
2985  * RETURN     :
2986  *
2987  *==========================================================================*/
unblockRequestIfNecessary()2988 void QCamera3HardwareInterface::unblockRequestIfNecessary()
2989 {
2990    // Unblock process_capture_request
2991    pthread_cond_signal(&mRequestCond);
2992 }
2993 
2994 
2995 /*===========================================================================
2996  * FUNCTION   : processCaptureRequest
2997  *
2998  * DESCRIPTION: process a capture request from camera service
2999  *
3000  * PARAMETERS :
3001  *   @request : request from framework to process
3002  *
3003  * RETURN     :
3004  *
3005  *==========================================================================*/
processCaptureRequest(camera3_capture_request_t * request)3006 int QCamera3HardwareInterface::processCaptureRequest(
3007                     camera3_capture_request_t *request)
3008 {
3009     ATRACE_CALL();
3010     int rc = NO_ERROR;
3011     int32_t request_id;
3012     CameraMetadata meta;
3013     uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
3014     uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3015     bool isVidBufRequested = false;
3016     camera3_stream_buffer_t *pInputBuffer = NULL;
3017 
3018     pthread_mutex_lock(&mMutex);
3019 
3020     rc = validateCaptureRequest(request);
3021     if (rc != NO_ERROR) {
3022         ALOGE("%s: incoming request is not valid", __func__);
3023         pthread_mutex_unlock(&mMutex);
3024         return rc;
3025     }
3026 
3027     meta = request->settings;
3028 
3029     // For first capture request, send capture intent, and
3030     // stream on all streams
3031     if (mFirstRequest) {
3032         // send an unconfigure to the backend so that the isp
3033         // resources are deallocated
3034         if (!mFirstConfiguration) {
3035             cam_stream_size_info_t stream_config_info;
3036             int32_t hal_version = CAM_HAL_V3;
3037             memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3038             stream_config_info.buffer_info.min_buffers =
3039                     MIN_INFLIGHT_REQUESTS;
3040             stream_config_info.buffer_info.max_buffers =
3041                     m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3042             clear_metadata_buffer(mParameters);
3043             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3044                     CAM_INTF_PARM_HAL_VERSION, hal_version);
3045             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3046                     CAM_INTF_META_STREAM_INFO, stream_config_info);
3047             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3048                     mParameters);
3049             if (rc < 0) {
3050                 ALOGE("%s: set_parms for unconfigure failed", __func__);
3051                 pthread_mutex_unlock(&mMutex);
3052                 return rc;
3053             }
3054         }
3055         m_perfLock.lock_acq();
3056         /* get eis information for stream configuration */
3057         cam_is_type_t is_type;
3058         char is_type_value[PROPERTY_VALUE_MAX];
3059         property_get("persist.camera.is_type", is_type_value, "0");
3060         is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3061 
3062         if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3063             int32_t hal_version = CAM_HAL_V3;
3064             uint8_t captureIntent =
3065                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3066             mCaptureIntent = captureIntent;
3067             clear_metadata_buffer(mParameters);
3068             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3069             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3070         }
3071 
3072         //If EIS is enabled, turn it on for video
3073         bool setEis = m_bEisEnable && m_bEisSupportedSize;
3074         int32_t vsMode;
3075         vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3076         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3077             rc = BAD_VALUE;
3078         }
3079 
3080         //IS type will be 0 unless EIS is supported. If EIS is supported
3081         //it could either be 1 or 4 depending on the stream and video size
3082         if (setEis) {
3083             if (!m_bEisSupportedSize) {
3084                 is_type = IS_TYPE_DIS;
3085             } else {
3086                 is_type = IS_TYPE_EIS_2_0;
3087             }
3088             mStreamConfigInfo.is_type = is_type;
3089         } else {
3090             mStreamConfigInfo.is_type = IS_TYPE_NONE;
3091         }
3092 
3093         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3094                 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3095         int32_t tintless_value = 1;
3096         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3097                 CAM_INTF_PARM_TINTLESS, tintless_value);
3098         //Disable CDS for HFR mode and if mPprocBypass = true.
3099         //CDS is a session parameter in the backend/ISP, so need to be set/reset
3100         //after every configure_stream
3101         if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3102                 (m_bIsVideo)) {
3103             int32_t cds = CAM_CDS_MODE_OFF;
3104             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3105                     CAM_INTF_PARM_CDS_MODE, cds))
3106                 ALOGE("%s: Failed to disable CDS for HFR mode", __func__);
3107 
3108         }
3109         setMobicat();
3110 
3111         /* Set fps and hfr mode while sending meta stream info so that sensor
3112          * can configure appropriate streaming mode */
3113         mHFRVideoFps = DEFAULT_VIDEO_FPS;
3114         if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3115             rc = setHalFpsRange(meta, mParameters);
3116             if (rc != NO_ERROR) {
3117                 ALOGE("%s: setHalFpsRange failed", __func__);
3118             }
3119         }
3120         if (meta.exists(ANDROID_CONTROL_MODE)) {
3121             uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3122             rc = extractSceneMode(meta, metaMode, mParameters);
3123             if (rc != NO_ERROR) {
3124                 ALOGE("%s: extractSceneMode failed", __func__);
3125             }
3126         }
3127 
3128         //TODO: validate the arguments, HSV scenemode should have only the
3129         //advertised fps ranges
3130 
3131         /*set the capture intent, hal version, tintless, stream info,
3132          *and disenable parameters to the backend*/
3133         CDBG("%s: set_parms META_STREAM_INFO ", __func__ );
3134         mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3135                     mParameters);
3136 
3137         cam_dimension_t sensor_dim;
3138         memset(&sensor_dim, 0, sizeof(sensor_dim));
3139         rc = getSensorOutputSize(sensor_dim);
3140         if (rc != NO_ERROR) {
3141             ALOGE("%s: Failed to get sensor output size", __func__);
3142             pthread_mutex_unlock(&mMutex);
3143             goto error_exit;
3144         }
3145 
3146         mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3147                 gCamCapability[mCameraId]->active_array_size.height,
3148                 sensor_dim.width, sensor_dim.height);
3149 
3150         /* Set batchmode before initializing channel. Since registerBuffer
3151          * internally initializes some of the channels, better set batchmode
3152          * even before first register buffer */
3153         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3154             it != mStreamInfo.end(); it++) {
3155             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3156             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3157                     && mBatchSize) {
3158                 rc = channel->setBatchSize(mBatchSize);
3159                 //Disable per frame map unmap for HFR/batchmode case
3160                 rc |= channel->setPerFrameMapUnmap(false);
3161                 if (NO_ERROR != rc) {
3162                     ALOGE("%s : Channel init failed %d", __func__, rc);
3163                     pthread_mutex_unlock(&mMutex);
3164                     goto error_exit;
3165                 }
3166             }
3167         }
3168 
3169         //First initialize all streams
3170         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3171             it != mStreamInfo.end(); it++) {
3172             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3173             if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3174                ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3175                setEis)
3176                 rc = channel->initialize(is_type);
3177             else {
3178                 rc = channel->initialize(IS_TYPE_NONE);
3179             }
3180             if (NO_ERROR != rc) {
3181                 ALOGE("%s : Channel initialization failed %d", __func__, rc);
3182                 pthread_mutex_unlock(&mMutex);
3183                 goto error_exit;
3184             }
3185         }
3186 
3187         if (mRawDumpChannel) {
3188             rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3189             if (rc != NO_ERROR) {
3190                 ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
3191                 pthread_mutex_unlock(&mMutex);
3192                 goto error_exit;
3193             }
3194         }
3195         if (mSupportChannel) {
3196             rc = mSupportChannel->initialize(IS_TYPE_NONE);
3197             if (rc < 0) {
3198                 ALOGE("%s: Support channel initialization failed", __func__);
3199                 pthread_mutex_unlock(&mMutex);
3200                 goto error_exit;
3201             }
3202         }
3203         if (mAnalysisChannel) {
3204             rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3205             if (rc < 0) {
3206                 ALOGE("%s: Analysis channel initialization failed", __func__);
3207                 pthread_mutex_unlock(&mMutex);
3208                 goto error_exit;
3209             }
3210         }
3211         if (mDummyBatchChannel) {
3212             rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3213             if (rc < 0) {
3214                 ALOGE("%s: mDummyBatchChannel setBatchSize failed", __func__);
3215                 pthread_mutex_unlock(&mMutex);
3216                 goto error_exit;
3217             }
3218             rc = mDummyBatchChannel->initialize(is_type);
3219             if (rc < 0) {
3220                 ALOGE("%s: mDummyBatchChannel initialization failed", __func__);
3221                 pthread_mutex_unlock(&mMutex);
3222                 goto error_exit;
3223             }
3224         }
3225 
3226         // Set bundle info
3227         rc = setBundleInfo();
3228         if (rc < 0) {
3229             ALOGE("%s: setBundleInfo failed %d", __func__, rc);
3230             pthread_mutex_unlock(&mMutex);
3231             goto error_exit;
3232         }
3233 
3234         //Then start them.
3235         CDBG_HIGH("%s: Start META Channel", __func__);
3236         rc = mMetadataChannel->start();
3237         if (rc < 0) {
3238             ALOGE("%s: META channel start failed", __func__);
3239             pthread_mutex_unlock(&mMutex);
3240             goto error_exit;
3241         }
3242 
3243         if (mAnalysisChannel) {
3244             rc = mAnalysisChannel->start();
3245             if (rc < 0) {
3246                 ALOGE("%s: Analysis channel start failed", __func__);
3247                 mMetadataChannel->stop();
3248                 pthread_mutex_unlock(&mMutex);
3249                 goto error_exit;
3250             }
3251         }
3252 
3253         if (mSupportChannel) {
3254             rc = mSupportChannel->start();
3255             if (rc < 0) {
3256                 ALOGE("%s: Support channel start failed", __func__);
3257                 mMetadataChannel->stop();
3258                 /* Although support and analysis are mutually exclusive today
3259                    adding it in anycase for future proofing */
3260                 if (mAnalysisChannel) {
3261                     mAnalysisChannel->stop();
3262                 }
3263                 pthread_mutex_unlock(&mMutex);
3264                 goto error_exit;
3265             }
3266         }
3267         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3268             it != mStreamInfo.end(); it++) {
3269             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3270             CDBG_HIGH("%s: Start Processing Channel mask=%d",
3271                     __func__, channel->getStreamTypeMask());
3272             rc = channel->start();
3273             if (rc < 0) {
3274                 ALOGE("%s: channel start failed", __func__);
3275                 pthread_mutex_unlock(&mMutex);
3276                 goto error_exit;
3277             }
3278         }
3279 
3280         if (mRawDumpChannel) {
3281             CDBG("%s: Starting raw dump stream",__func__);
3282             rc = mRawDumpChannel->start();
3283             if (rc != NO_ERROR) {
3284                 ALOGE("%s: Error Starting Raw Dump Channel", __func__);
3285                 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3286                       it != mStreamInfo.end(); it++) {
3287                     QCamera3Channel *channel =
3288                         (QCamera3Channel *)(*it)->stream->priv;
3289                     ALOGE("%s: Stopping Processing Channel mask=%d", __func__,
3290                         channel->getStreamTypeMask());
3291                     channel->stop();
3292                 }
3293                 if (mSupportChannel)
3294                     mSupportChannel->stop();
3295                 if (mAnalysisChannel) {
3296                     mAnalysisChannel->stop();
3297                 }
3298                 mMetadataChannel->stop();
3299                 pthread_mutex_unlock(&mMutex);
3300                 goto error_exit;
3301             }
3302         }
3303 
3304         if (mChannelHandle) {
3305 
3306             rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3307                     mChannelHandle);
3308             if (rc != NO_ERROR) {
3309                 ALOGE("%s: start_channel failed %d", __func__, rc);
3310                 pthread_mutex_unlock(&mMutex);
3311                 goto error_exit;
3312             }
3313         }
3314 
3315 
3316         goto no_error;
3317 error_exit:
3318         m_perfLock.lock_rel();
3319         return rc;
3320 no_error:
3321         m_perfLock.lock_rel();
3322 
3323         mWokenUpByDaemon = false;
3324         mPendingLiveRequest = 0;
3325         mFirstConfiguration = false;
3326         enablePowerHint();
3327     }
3328 
3329     uint32_t frameNumber = request->frame_number;
3330     cam_stream_ID_t streamID;
3331 
3332     if (meta.exists(ANDROID_REQUEST_ID)) {
3333         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3334         mCurrentRequestId = request_id;
3335         CDBG("%s: Received request with id: %d",__func__, request_id);
3336     } else if (mFirstRequest || mCurrentRequestId == -1){
3337         ALOGE("%s: Unable to find request id field, \
3338                 & no previous id available", __func__);
3339         pthread_mutex_unlock(&mMutex);
3340         return NAME_NOT_FOUND;
3341     } else {
3342         CDBG("%s: Re-using old request id", __func__);
3343         request_id = mCurrentRequestId;
3344     }
3345 
3346     CDBG_HIGH("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
3347                                     __func__, __LINE__,
3348                                     request->num_output_buffers,
3349                                     request->input_buffer,
3350                                     frameNumber);
3351     // Acquire all request buffers first
3352     streamID.num_streams = 0;
3353     int blob_request = 0;
3354     uint32_t snapshotStreamId = 0;
3355     for (size_t i = 0; i < request->num_output_buffers; i++) {
3356         const camera3_stream_buffer_t& output = request->output_buffers[i];
3357         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3358 
3359         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3360             //Call function to store local copy of jpeg data for encode params.
3361             blob_request = 1;
3362             snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3363         }
3364 
3365         if (output.acquire_fence != -1) {
3366            rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3367            close(output.acquire_fence);
3368            if (rc != OK) {
3369               ALOGE("%s: sync wait failed %d", __func__, rc);
3370               pthread_mutex_unlock(&mMutex);
3371               return rc;
3372            }
3373         }
3374 
3375         streamID.streamID[streamID.num_streams] =
3376             channel->getStreamID(channel->getStreamTypeMask());
3377         streamID.num_streams++;
3378 
3379         if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3380             isVidBufRequested = true;
3381         }
3382     }
3383 
3384     if (blob_request && mRawDumpChannel) {
3385         CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
3386         streamID.streamID[streamID.num_streams] =
3387             mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3388         streamID.num_streams++;
3389     }
3390 
3391     if(request->input_buffer == NULL) {
3392         /* Parse the settings:
3393          * - For every request in NORMAL MODE
3394          * - For every request in HFR mode during preview only case
3395          * - For first request of every batch in HFR mode during video
3396          * recording. In batchmode the same settings except frame number is
3397          * repeated in each request of the batch.
3398          */
3399         if (!mBatchSize ||
3400            (mBatchSize && !isVidBufRequested) ||
3401            (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3402             rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
3403             if (rc < 0) {
3404                 ALOGE("%s: fail to set frame parameters", __func__);
3405                 pthread_mutex_unlock(&mMutex);
3406                 return rc;
3407             }
3408         }
3409         /* For batchMode HFR, setFrameParameters is not called for every
3410          * request. But only frame number of the latest request is parsed.
3411          * Keep track of first and last frame numbers in a batch so that
3412          * metadata for the frame numbers of batch can be duplicated in
3413          * handleBatchMetadta */
3414         if (mBatchSize) {
3415             if (!mToBeQueuedVidBufs) {
3416                 //start of the batch
3417                 mFirstFrameNumberInBatch = request->frame_number;
3418             }
3419             if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3420                 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3421                 ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3422                 return BAD_VALUE;
3423             }
3424         }
3425         if (mNeedSensorRestart) {
3426             /* Unlock the mutex as restartSensor waits on the channels to be
3427              * stopped, which in turn calls stream callback functions -
3428              * handleBufferWithLock and handleMetadataWithLock */
3429             pthread_mutex_unlock(&mMutex);
3430             rc = dynamicUpdateMetaStreamInfo();
3431             if (rc != NO_ERROR) {
3432                 ALOGE("%s: Restarting the sensor failed", __func__);
3433                 return BAD_VALUE;
3434             }
3435             mNeedSensorRestart = false;
3436             pthread_mutex_lock(&mMutex);
3437         }
3438     } else {
3439 
3440         if (request->input_buffer->acquire_fence != -1) {
3441            rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3442            close(request->input_buffer->acquire_fence);
3443            if (rc != OK) {
3444               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
3445               pthread_mutex_unlock(&mMutex);
3446               return rc;
3447            }
3448         }
3449     }
3450 
3451     if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
3452         mLastCustIntentFrmNum = frameNumber;
3453     }
3454     /* Update pending request list and pending buffers map */
3455     PendingRequestInfo pendingRequest;
3456     pendingRequestIterator latestRequest;
3457     pendingRequest.frame_number = frameNumber;
3458     pendingRequest.num_buffers = request->num_output_buffers;
3459     pendingRequest.request_id = request_id;
3460     pendingRequest.blob_request = blob_request;
3461     pendingRequest.timestamp = 0;
3462     pendingRequest.bUrgentReceived = 0;
3463     if (request->input_buffer) {
3464         pendingRequest.input_buffer =
3465                 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
3466         *(pendingRequest.input_buffer) = *(request->input_buffer);
3467         pInputBuffer = pendingRequest.input_buffer;
3468     } else {
3469        pendingRequest.input_buffer = NULL;
3470        pInputBuffer = NULL;
3471     }
3472 
3473     pendingRequest.pipeline_depth = 0;
3474     pendingRequest.partial_result_cnt = 0;
3475     extractJpegMetadata(mCurJpegMeta, request);
3476     pendingRequest.jpegMetadata = mCurJpegMeta;
3477     pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
3478     pendingRequest.shutter_notified = false;
3479 
3480     //extract capture intent
3481     if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3482         mCaptureIntent =
3483                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3484     }
3485     pendingRequest.capture_intent = mCaptureIntent;
3486 
3487     for (size_t i = 0; i < request->num_output_buffers; i++) {
3488         RequestedBufferInfo requestedBuf;
3489         memset(&requestedBuf, 0, sizeof(requestedBuf));
3490         requestedBuf.stream = request->output_buffers[i].stream;
3491         requestedBuf.buffer = NULL;
3492         pendingRequest.buffers.push_back(requestedBuf);
3493 
3494         // Add to buffer handle the pending buffers list
3495         PendingBufferInfo bufferInfo;
3496         bufferInfo.frame_number = frameNumber;
3497         bufferInfo.buffer = request->output_buffers[i].buffer;
3498         bufferInfo.stream = request->output_buffers[i].stream;
3499         mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
3500         mPendingBuffersMap.num_buffers++;
3501         QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
3502         CDBG("%s: frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
3503                 __func__, frameNumber, bufferInfo.buffer,
3504                 channel->getStreamTypeMask(), bufferInfo.stream->format);
3505     }
3506     latestRequest = mPendingRequestsList.insert(
3507             mPendingRequestsList.end(), pendingRequest);
3508     if(mFlush) {
3509         pthread_mutex_unlock(&mMutex);
3510         return NO_ERROR;
3511     }
3512 
3513     // Notify metadata channel we receive a request
3514     mMetadataChannel->request(NULL, frameNumber);
3515 
3516     if(request->input_buffer != NULL){
3517         CDBG("%s: Input request, frame_number %d", __func__, frameNumber);
3518         rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
3519         if (NO_ERROR != rc) {
3520             ALOGE("%s: fail to set reproc parameters", __func__);
3521             pthread_mutex_unlock(&mMutex);
3522             return rc;
3523         }
3524     }
3525 
3526     // Call request on other streams
3527     uint32_t streams_need_metadata = 0;
3528     pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
3529     for (size_t i = 0; i < request->num_output_buffers; i++) {
3530         const camera3_stream_buffer_t& output = request->output_buffers[i];
3531         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3532 
3533         if (channel == NULL) {
3534             ALOGE("%s: invalid channel pointer for stream", __func__);
3535             continue;
3536         }
3537 
3538         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3539             if(request->input_buffer != NULL){
3540                 rc = channel->request(output.buffer, frameNumber,
3541                         pInputBuffer, &mReprocMeta);
3542                 if (rc < 0) {
3543                     ALOGE("%s: Fail to request on picture channel", __func__);
3544                     pthread_mutex_unlock(&mMutex);
3545                     return rc;
3546                 }
3547             } else {
3548                 CDBG("%s: %d, snapshot request with buffer %p, frame_number %d", __func__,
3549                         __LINE__, output.buffer, frameNumber);
3550                 if (!request->settings) {
3551                     rc = channel->request(output.buffer, frameNumber,
3552                             NULL, mPrevParameters);
3553                 } else {
3554                     rc = channel->request(output.buffer, frameNumber,
3555                             NULL, mParameters);
3556                 }
3557                 if (rc < 0) {
3558                     ALOGE("%s: Fail to request on picture channel", __func__);
3559                     pthread_mutex_unlock(&mMutex);
3560                     return rc;
3561                 }
3562                 pendingBufferIter->need_metadata = true;
3563                 streams_need_metadata++;
3564             }
3565         } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
3566             bool needMetadata = false;
3567             QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
3568             rc = yuvChannel->request(output.buffer, frameNumber,
3569                     pInputBuffer,
3570                     (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
3571             if (rc < 0) {
3572                 ALOGE("%s: Fail to request on YUV channel", __func__);
3573                 pthread_mutex_unlock(&mMutex);
3574                 return rc;
3575             }
3576             pendingBufferIter->need_metadata = needMetadata;
3577             if (needMetadata)
3578                 streams_need_metadata += 1;
3579             CDBG("%s: calling YUV channel request, need_metadata is %d",
3580                     __func__, needMetadata);
3581         } else {
3582             CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
3583                 __LINE__, output.buffer, frameNumber);
3584             rc = channel->request(output.buffer, frameNumber);
3585             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3586                     && mBatchSize) {
3587                 mToBeQueuedVidBufs++;
3588                 if (mToBeQueuedVidBufs == mBatchSize) {
3589                     channel->queueBatchBuf();
3590                 }
3591             }
3592             if (rc < 0) {
3593                 ALOGE("%s: request failed", __func__);
3594                 pthread_mutex_unlock(&mMutex);
3595                 return rc;
3596             }
3597         }
3598         pendingBufferIter++;
3599     }
3600 
3601     //If 2 streams have need_metadata set to true, fail the request, unless
3602     //we copy/reference count the metadata buffer
3603     if (streams_need_metadata > 1) {
3604         ALOGE("%s: not supporting request in which two streams requires"
3605                 " 2 HAL metadata for reprocessing", __func__);
3606         pthread_mutex_unlock(&mMutex);
3607         return -EINVAL;
3608     }
3609 
3610     if(request->input_buffer == NULL) {
3611         /* Set the parameters to backend:
3612          * - For every request in NORMAL MODE
3613          * - For every request in HFR mode during preview only case
3614          * - Once every batch in HFR mode during video recording
3615          */
3616         if (!mBatchSize ||
3617            (mBatchSize && !isVidBufRequested) ||
3618            (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
3619             CDBG("%s: set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
3620                     __func__, mBatchSize, isVidBufRequested,
3621                     mToBeQueuedVidBufs);
3622             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3623                     mParameters);
3624             if (rc < 0) {
3625                 ALOGE("%s: set_parms failed", __func__);
3626             }
3627             /* reset to zero coz, the batch is queued */
3628             mToBeQueuedVidBufs = 0;
3629             mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
3630         }
3631         mPendingLiveRequest++;
3632     }
3633 
3634     CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
3635 
3636     mFirstRequest = false;
3637     // Added a timed condition wait
3638     struct timespec ts;
3639     uint8_t isValidTimeout = 1;
3640     rc = clock_gettime(CLOCK_REALTIME, &ts);
3641     if (rc < 0) {
3642       isValidTimeout = 0;
3643       ALOGE("%s: Error reading the real time clock!!", __func__);
3644     }
3645     else {
3646       // Make timeout as 5 sec for request to be honored
3647       ts.tv_sec += 5;
3648     }
3649     //Block on conditional variable
3650     if (mBatchSize) {
3651         /* For HFR, more buffers are dequeued upfront to improve the performance */
3652         minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3653         maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3654     }
3655     while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer) {
3656         if (!isValidTimeout) {
3657             CDBG("%s: Blocking on conditional wait", __func__);
3658             pthread_cond_wait(&mRequestCond, &mMutex);
3659         }
3660         else {
3661             CDBG("%s: Blocking on timed conditional wait", __func__);
3662             rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
3663             if (rc == ETIMEDOUT) {
3664                 rc = -ENODEV;
3665                 ALOGE("%s: Unblocked on timeout!!!!", __func__);
3666                 break;
3667             }
3668         }
3669         CDBG("%s: Unblocked", __func__);
3670         if (mWokenUpByDaemon) {
3671             mWokenUpByDaemon = false;
3672             if (mPendingLiveRequest < maxInFlightRequests)
3673                 break;
3674         }
3675     }
3676     pthread_mutex_unlock(&mMutex);
3677 
3678     return rc;
3679 }
3680 
3681 /*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION: Dumps current HAL3 state (pending requests, pending buffer
 *              map, pending frame drop list) to the given file descriptor.
 *              Triggered via dumpsys media.camera.
 *
 * PARAMETERS :
 *   @fd      : file descriptor to write the dump output to
 *
 * RETURN     : None
3690  *==========================================================================*/
void QCamera3HardwareInterface::dump(int fd)
{
    // Hold mMutex for the whole dump so the pending lists cannot be mutated
    // by request/result processing while they are being printed.
    pthread_mutex_lock(&mMutex);
    dprintf(fd, "\n Camera HAL3 information Begin \n");

    // Table 1: all requests submitted to the HAL that have not fully
    // completed yet.
    dprintf(fd, "\nNumber of pending requests: %zu \n",
        mPendingRequestsList.size());
    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
    for(pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end(); i++) {
        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
        i->input_buffer);
    }
    // Table 2: output buffers still owned by the HAL, keyed by frame number
    // and the owning channel's stream type mask.
    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
                mPendingBuffersMap.num_buffers);
    dprintf(fd, "-------+------------------\n");
    dprintf(fd, " Frame | Stream type mask \n");
    dprintf(fd, "-------+------------------\n");
    for(List<PendingBufferInfo>::iterator i =
        mPendingBuffersMap.mPendingBufferList.begin();
        i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
        QCamera3Channel *channel = (QCamera3Channel *)(i->stream->priv);
        dprintf(fd, " %5d | %11d \n",
                i->frame_number, channel->getStreamTypeMask());
    }
    dprintf(fd, "-------+------------------\n");

    // Table 3: frames marked to be dropped, per stream.
    dprintf(fd, "\nPending frame drop list: %zu\n",
        mPendingFrameDropList.size());
    dprintf(fd, "-------+-----------\n");
    dprintf(fd, " Frame | Stream ID \n");
    dprintf(fd, "-------+-----------\n");
    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
        i != mPendingFrameDropList.end(); i++) {
        dprintf(fd, " %5d | %9d \n",
            i->frame_number, i->stream_ID);
    }
    dprintf(fd, "-------+-----------\n");

    dprintf(fd, "\n Camera HAL3 information End \n");

    /* use dumpsys media.camera as trigger to send update debug level event */
    mUpdateDebugLevel = true;
    pthread_mutex_unlock(&mMutex);
    return;
}
3740 
3741 /*===========================================================================
 * FUNCTION   : flush
 *
 * DESCRIPTION: Flushes all in-flight work: stops all channels, returns
 *              errors for pending requests, then restarts the channels so
 *              streaming can resume.
 *
 * PARAMETERS : None
 *
 * RETURN     : 0 on success, negative error code on failure
3750  *==========================================================================*/
flush()3751 int QCamera3HardwareInterface::flush()
3752 {
3753     ATRACE_CALL();
3754     int32_t rc = NO_ERROR;
3755 
3756     CDBG("%s: Unblocking Process Capture Request", __func__);
3757     pthread_mutex_lock(&mMutex);
3758 
3759     if (mFirstRequest) {
3760         pthread_mutex_unlock(&mMutex);
3761         return NO_ERROR;
3762     }
3763 
3764     mFlush = true;
3765     pthread_mutex_unlock(&mMutex);
3766 
3767     rc = stopAllChannels();
3768     if (rc < 0) {
3769         ALOGE("%s: stopAllChannels failed", __func__);
3770         return rc;
3771     }
3772     if (mChannelHandle) {
3773         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
3774                 mChannelHandle);
3775     }
3776 
3777     // Reset bundle info
3778     rc = setBundleInfo();
3779     if (rc < 0) {
3780         ALOGE("%s: setBundleInfo failed %d", __func__, rc);
3781         return rc;
3782     }
3783 
3784     // Mutex Lock
3785     pthread_mutex_lock(&mMutex);
3786 
3787     // Unblock process_capture_request
3788     mPendingLiveRequest = 0;
3789     pthread_cond_signal(&mRequestCond);
3790 
3791     rc = notifyErrorForPendingRequests();
3792     if (rc < 0) {
3793         ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
3794         pthread_mutex_unlock(&mMutex);
3795         return rc;
3796     }
3797 
3798     mFlush = false;
3799 
3800     // Start the Streams/Channels
3801     rc = startAllChannels();
3802     if (rc < 0) {
3803         ALOGE("%s: startAllChannels failed", __func__);
3804         pthread_mutex_unlock(&mMutex);
3805         return rc;
3806     }
3807 
3808     if (mChannelHandle) {
3809         mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3810                     mChannelHandle);
3811         if (rc < 0) {
3812             ALOGE("%s: start_channel failed", __func__);
3813             pthread_mutex_unlock(&mMutex);
3814             return rc;
3815         }
3816     }
3817 
3818     pthread_mutex_unlock(&mMutex);
3819 
3820     return 0;
3821 }
3822 
3823 /*===========================================================================
3824  * FUNCTION   : captureResultCb
3825  *
3826  * DESCRIPTION: Callback handler for all capture result
3827  *              (streams, as well as metadata)
3828  *
3829  * PARAMETERS :
3830  *   @metadata : metadata information
3831  *   @buffer   : actual gralloc buffer to be returned to frameworks.
3832  *               NULL if metadata.
3833  *
3834  * RETURN     : NONE
3835  *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata_buf,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer)3836 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
3837                 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
3838 {
3839     if (metadata_buf) {
3840         if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
3841             handleBatchMetadata(metadata_buf,
3842                     true /* free_and_bufdone_meta_buf */);
3843         } else { /* mBatchSize = 0 */
3844             hdrPlusPerfLock(metadata_buf);
3845             pthread_mutex_lock(&mMutex);
3846             handleMetadataWithLock(metadata_buf,
3847                     true /* free_and_bufdone_meta_buf */);
3848             pthread_mutex_unlock(&mMutex);
3849         }
3850     } else if (isInputBuffer) {
3851         pthread_mutex_lock(&mMutex);
3852         handleInputBufferWithLock(frame_number);
3853         pthread_mutex_unlock(&mMutex);
3854     } else {
3855         pthread_mutex_lock(&mMutex);
3856         handleBufferWithLock(buffer, frame_number);
3857         pthread_mutex_unlock(&mMutex);
3858     }
3859     return;
3860 }
3861 
3862 /*===========================================================================
3863  * FUNCTION   : getReprocessibleOutputStreamId
3864  *
3865  * DESCRIPTION: Get source output stream id for the input reprocess stream
3866  *              based on size and format, which would be the largest
3867  *              output stream if an input stream exists.
3868  *
3869  * PARAMETERS :
3870  *   @id      : return the stream id if found
3871  *
3872  * RETURN     : int32_t type of status
3873  *              NO_ERROR  -- success
3874  *              none-zero failure code
3875  *==========================================================================*/
getReprocessibleOutputStreamId(uint32_t & id)3876 int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
3877 {
3878     stream_info_t* stream = NULL;
3879 
3880     /* check if any output or bidirectional stream with the same size and format
3881        and return that stream */
3882     if ((mInputStreamInfo.dim.width > 0) &&
3883             (mInputStreamInfo.dim.height > 0)) {
3884         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3885                 it != mStreamInfo.end(); it++) {
3886 
3887             camera3_stream_t *stream = (*it)->stream;
3888             if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
3889                     (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
3890                     (stream->format == mInputStreamInfo.format)) {
3891                 // Usage flag for an input stream and the source output stream
3892                 // may be different.
3893                 CDBG("%s: Found reprocessible output stream! %p", __func__, *it);
3894                 CDBG("%s: input stream usage 0x%x, current stream usage 0x%x",
3895                         __func__, stream->usage, mInputStreamInfo.usage);
3896 
3897                 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
3898                 if (channel != NULL && channel->mStreams[0]) {
3899                     id = channel->mStreams[0]->getMyServerID();
3900                     return NO_ERROR;
3901                 }
3902             }
3903         }
3904     } else {
3905         CDBG("%s: No input stream, so no reprocessible output stream", __func__);
3906     }
3907     return NAME_NOT_FOUND;
3908 }
3909 
3910 /*===========================================================================
3911  * FUNCTION   : lookupFwkName
3912  *
3913  * DESCRIPTION: In case the enum is not same in fwk and backend
3914  *              make sure the parameter is correctly propogated
3915  *
3916  * PARAMETERS  :
3917  *   @arr      : map between the two enums
3918  *   @len      : len of the map
3919  *   @hal_name : name of the hal_parm to map
3920  *
3921  * RETURN     : int type of status
3922  *              fwk_name  -- success
3923  *              none-zero failure code
3924  *==========================================================================*/
lookupFwkName(const mapType * arr,size_t len,halType hal_name)3925 template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
3926         size_t len, halType hal_name)
3927 {
3928 
3929     for (size_t i = 0; i < len; i++) {
3930         if (arr[i].hal_name == hal_name) {
3931             return arr[i].fwk_name;
3932         }
3933     }
3934 
3935     /* Not able to find matching framework type is not necessarily
3936      * an error case. This happens when mm-camera supports more attributes
3937      * than the frameworks do */
3938     CDBG_HIGH("%s: Cannot find matching framework type", __func__);
3939     return NAME_NOT_FOUND;
3940 }
3941 
3942 /*===========================================================================
3943  * FUNCTION   : lookupHalName
3944  *
3945  * DESCRIPTION: In case the enum is not same in fwk and backend
3946  *              make sure the parameter is correctly propogated
3947  *
3948  * PARAMETERS  :
3949  *   @arr      : map between the two enums
3950  *   @len      : len of the map
3951  *   @fwk_name : name of the hal_parm to map
3952  *
3953  * RETURN     : int32_t type of status
3954  *              hal_name  -- success
3955  *              none-zero failure code
3956  *==========================================================================*/
lookupHalName(const mapType * arr,size_t len,fwkType fwk_name)3957 template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
3958         size_t len, fwkType fwk_name)
3959 {
3960     for (size_t i = 0; i < len; i++) {
3961         if (arr[i].fwk_name == fwk_name) {
3962             return arr[i].hal_name;
3963         }
3964     }
3965 
3966     ALOGE("%s: Cannot find matching hal type fwk_name=%d", __func__, fwk_name);
3967     return NAME_NOT_FOUND;
3968 }
3969 
3970 /*===========================================================================
3971  * FUNCTION   : lookupProp
3972  *
3973  * DESCRIPTION: lookup a value by its name
3974  *
3975  * PARAMETERS :
3976  *   @arr     : map between the two enums
3977  *   @len     : size of the map
3978  *   @name    : name to be looked up
3979  *
3980  * RETURN     : Value if found
3981  *              CAM_CDS_MODE_MAX if not found
3982  *==========================================================================*/
lookupProp(const mapType * arr,size_t len,const char * name)3983 template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
3984         size_t len, const char *name)
3985 {
3986     if (name) {
3987         for (size_t i = 0; i < len; i++) {
3988             if (!strcmp(arr[i].desc, name)) {
3989                 return arr[i].val;
3990             }
3991         }
3992     }
3993     return CAM_CDS_MODE_MAX;
3994 }
3995 
3996 /*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translates metadata received from the HAL backend into the
 *              camera_metadata_t format expected by the framework.
3999  *
4000  * PARAMETERS :
4001  *   @metadata : metadata information from callback
4002  *   @timestamp: metadata buffer timestamp
4003  *   @request_id: request id
4004  *   @jpegMetadata: additional jpeg metadata
4005  *   @pprocDone: whether internal offline postprocsesing is done
4006  *
4007  * RETURN     : camera_metadata_t*
4008  *              metadata in a format specified by fwk
4009  *==========================================================================*/
4010 camera_metadata_t*
translateFromHalMetadata(metadata_buffer_t * metadata,nsecs_t timestamp,int32_t request_id,const CameraMetadata & jpegMetadata,uint8_t pipeline_depth,uint8_t capture_intent,bool pprocDone)4011 QCamera3HardwareInterface::translateFromHalMetadata(
4012                                  metadata_buffer_t *metadata,
4013                                  nsecs_t timestamp,
4014                                  int32_t request_id,
4015                                  const CameraMetadata& jpegMetadata,
4016                                  uint8_t pipeline_depth,
4017                                  uint8_t capture_intent,
4018                                  bool pprocDone)
4019 {
4020     CameraMetadata camMetadata;
4021     camera_metadata_t *resultMetadata;
4022 
4023     if (jpegMetadata.entryCount())
4024         camMetadata.append(jpegMetadata);
4025 
4026     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
4027     camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4028     camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4029     camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4030 
4031     IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
4032         int64_t fwk_frame_number = *frame_number;
4033         camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
4034     }
4035 
4036     IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
4037         int32_t fps_range[2];
4038         fps_range[0] = (int32_t)float_range->min_fps;
4039         fps_range[1] = (int32_t)float_range->max_fps;
4040         camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
4041                                       fps_range, 2);
4042         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
4043             __func__, fps_range[0], fps_range[1]);
4044     }
4045 
4046     IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
4047         camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
4048     }
4049 
4050     IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4051         int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
4052                 METADATA_MAP_SIZE(SCENE_MODES_MAP),
4053                 *sceneMode);
4054         if (NAME_NOT_FOUND != val) {
4055             uint8_t fwkSceneMode = (uint8_t)val;
4056             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
4057             CDBG("%s: urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
4058                     __func__, fwkSceneMode);
4059         }
4060     }
4061 
4062     IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
4063         uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
4064         camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
4065     }
4066 
4067     IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
4068         uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
4069         camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
4070     }
4071 
4072     IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
4073         uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
4074         camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
4075     }
4076 
4077     IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
4078             CAM_INTF_META_EDGE_MODE, metadata) {
4079         uint8_t edgeStrength = (uint8_t) edgeApplication->sharpness;
4080         camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
4081     }
4082 
4083     IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
4084         uint8_t fwk_flashPower = (uint8_t) *flashPower;
4085         camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
4086     }
4087 
4088     IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
4089         camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
4090     }
4091 
4092     IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
4093         if (0 <= *flashState) {
4094             uint8_t fwk_flashState = (uint8_t) *flashState;
4095             if (!gCamCapability[mCameraId]->flash_available) {
4096                 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
4097             }
4098             camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
4099         }
4100     }
4101 
4102     IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
4103         int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
4104         if (NAME_NOT_FOUND != val) {
4105             uint8_t fwk_flashMode = (uint8_t)val;
4106             camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
4107         }
4108     }
4109 
4110     IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
4111         uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
4112         camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
4113     }
4114 
4115     IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
4116         camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
4117     }
4118 
4119     IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
4120         camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
4121     }
4122 
4123     IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
4124         camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
4125     }
4126 
4127     IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
4128         uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
4129         camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
4130     }
4131 
4132     IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
4133         uint8_t fwk_videoStab = (uint8_t) *videoStab;
4134         camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
4135     }
4136 
4137     IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
4138         uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
4139         camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
4140     }
4141 
4142     IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
4143         camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
4144     }
4145 
4146     IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
4147         CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
4148 
4149         CDBG("%s: dynamicblackLevel = %f %f %f %f", __func__,
4150           blackLevelSourcePattern->cam_black_level[0],
4151           blackLevelSourcePattern->cam_black_level[1],
4152           blackLevelSourcePattern->cam_black_level[2],
4153           blackLevelSourcePattern->cam_black_level[3]);
4154     }
4155 
4156     IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
4157         CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
4158         float fwk_blackLevelInd[4];
4159 
4160         fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
4161         fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
4162         fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
4163         fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
4164 
4165         CDBG("%s: applied dynamicblackLevel = %f %f %f %f", __func__,
4166           blackLevelAppliedPattern->cam_black_level[0],
4167           blackLevelAppliedPattern->cam_black_level[1],
4168           blackLevelAppliedPattern->cam_black_level[2],
4169           blackLevelAppliedPattern->cam_black_level[3]);
4170         camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
4171         camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4172     }
4173 
4174 
4175     if (gCamCapability[mCameraId]->optical_black_region_count != 0 &&
4176         gCamCapability[mCameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
4177         int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
4178         for (size_t i = 0; i < gCamCapability[mCameraId]->optical_black_region_count * 4; i++) {
4179             opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
4180         }
4181         camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_INFO_OPTICALLY_SHIELDED_REGIONS,
4182                 opticalBlackRegions, gCamCapability[mCameraId]->optical_black_region_count * 4);
4183     }
4184 
4185     IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
4186             CAM_INTF_META_SCALER_CROP_REGION, metadata) {
4187         int32_t scalerCropRegion[4];
4188         scalerCropRegion[0] = hScalerCropRegion->left;
4189         scalerCropRegion[1] = hScalerCropRegion->top;
4190         scalerCropRegion[2] = hScalerCropRegion->width;
4191         scalerCropRegion[3] = hScalerCropRegion->height;
4192 
4193         // Adjust crop region from sensor output coordinate system to active
4194         // array coordinate system.
4195         mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
4196                 scalerCropRegion[2], scalerCropRegion[3]);
4197 
4198         camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
4199     }
4200 
4201     IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
4202         CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
4203         camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
4204     }
4205 
4206     IF_META_AVAILABLE(int64_t, sensorFameDuration,
4207             CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
4208         CDBG("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
4209         camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
4210     }
4211 
4212     IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
4213             CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
4214         CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
4215         camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
4216                 sensorRollingShutterSkew, 1);
4217     }
4218 
4219     IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
4220         CDBG("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
4221         camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
4222 
4223         //calculate the noise profile based on sensitivity
4224         double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
4225         double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
4226         double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
4227         for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
4228             noise_profile[i]   = noise_profile_S;
4229             noise_profile[i+1] = noise_profile_O;
4230         }
4231         CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
4232                 noise_profile_S, noise_profile_O);
4233         camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
4234                 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
4235     }
4236 
4237     IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
4238         uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
4239         camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
4240     }
4241 
4242     IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
4243         int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
4244                 *faceDetectMode);
4245         if (NAME_NOT_FOUND != val) {
4246             uint8_t fwk_faceDetectMode = (uint8_t)val;
4247             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
4248 
4249             if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4250                 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
4251                         CAM_INTF_META_FACE_DETECTION, metadata) {
4252                     uint8_t numFaces = MIN(
4253                             faceDetectionInfo->num_faces_detected, MAX_ROI);
4254                     int32_t faceIds[MAX_ROI];
4255                     uint8_t faceScores[MAX_ROI];
4256                     int32_t faceRectangles[MAX_ROI * 4];
4257                     int32_t faceLandmarks[MAX_ROI * 6];
4258                     size_t j = 0, k = 0;
4259 
4260                     for (size_t i = 0; i < numFaces; i++) {
4261                         faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
4262                         // Adjust crop region from sensor output coordinate system to active
4263                         // array coordinate system.
4264                         cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
4265                         mCropRegionMapper.toActiveArray(rect.left, rect.top,
4266                                 rect.width, rect.height);
4267 
4268                         convertToRegions(faceDetectionInfo->faces[i].face_boundary,
4269                                 faceRectangles+j, -1);
4270 
4271                         // Map the co-ordinate sensor output coordinate system to active
4272                         // array coordinate system.
4273                         cam_face_detection_info_t& face = faceDetectionInfo->faces[i];
4274                         mCropRegionMapper.toActiveArray(face.left_eye_center.x,
4275                                 face.left_eye_center.y);
4276                         mCropRegionMapper.toActiveArray(face.right_eye_center.x,
4277                                 face.right_eye_center.y);
4278                         mCropRegionMapper.toActiveArray(face.mouth_center.x,
4279                                 face.mouth_center.y);
4280 
4281                         convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
4282                         j+= 4;
4283                         k+= 6;
4284                     }
4285                     if (numFaces <= 0) {
4286                         memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
4287                         memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
4288                         memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
4289                         memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
4290                     }
4291 
4292                     camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
4293                             numFaces);
4294                     camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
4295                             faceRectangles, numFaces * 4U);
4296                     if (fwk_faceDetectMode ==
4297                             ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
4298                         camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
4299                         camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
4300                                 faceLandmarks, numFaces * 6U);
4301                    }
4302                 }
4303             }
4304         }
4305     }
4306 
4307     IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
4308         uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
4309         camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
4310     }
4311 
4312     IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
4313             CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
4314         uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
4315         camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
4316     }
4317 
4318     IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
4319             CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
4320         camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
4321                 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
4322     }
4323 
4324     IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
4325             CAM_INTF_META_LENS_SHADING_MAP, metadata) {
4326         size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
4327                 CAM_MAX_SHADING_MAP_HEIGHT);
4328         size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
4329                 CAM_MAX_SHADING_MAP_WIDTH);
4330         camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
4331                 lensShadingMap->lens_shading, 4U * map_width * map_height);
4332     }
4333 
4334     IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
4335         uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
4336         camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
4337     }
4338 
4339     IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
4340         //Populate CAM_INTF_META_TONEMAP_CURVES
4341         /* ch0 = G, ch 1 = B, ch 2 = R*/
4342         if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4343             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4344                     __func__, tonemap->tonemap_points_cnt,
4345                     CAM_MAX_TONEMAP_CURVE_SIZE);
4346             tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4347         }
4348 
4349         camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
4350                         &tonemap->curves[0].tonemap_points[0][0],
4351                         tonemap->tonemap_points_cnt * 2);
4352 
4353         camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
4354                         &tonemap->curves[1].tonemap_points[0][0],
4355                         tonemap->tonemap_points_cnt * 2);
4356 
4357         camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
4358                         &tonemap->curves[2].tonemap_points[0][0],
4359                         tonemap->tonemap_points_cnt * 2);
4360     }
4361 
4362     IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
4363             CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
4364         camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
4365                 CC_GAINS_COUNT);
4366     }
4367 
4368     IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
4369             CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
4370         camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
4371                 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
4372                 CC_MATRIX_COLS * CC_MATRIX_ROWS);
4373     }
4374 
4375     IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
4376             CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
4377         if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4378             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4379                     __func__, toneCurve->tonemap_points_cnt,
4380                     CAM_MAX_TONEMAP_CURVE_SIZE);
4381             toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4382         }
4383         camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
4384                 (float*)toneCurve->curve.tonemap_points,
4385                 toneCurve->tonemap_points_cnt * 2);
4386     }
4387 
4388     IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
4389             CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
4390         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
4391                 predColorCorrectionGains->gains, 4);
4392     }
4393 
4394     IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
4395             CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
4396         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4397                 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
4398                 CC_MATRIX_ROWS * CC_MATRIX_COLS);
4399     }
4400 
4401     IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
4402         camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
4403     }
4404 
4405     IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
4406         uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
4407         camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
4408     }
4409 
4410     IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
4411         uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
4412         camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
4413     }
4414 
4415     IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
4416         int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
4417                 *effectMode);
4418         if (NAME_NOT_FOUND != val) {
4419             uint8_t fwk_effectMode = (uint8_t)val;
4420             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
4421         }
4422     }
4423 
4424     IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
4425             CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
4426         int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
4427                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
4428         if (NAME_NOT_FOUND != fwk_testPatternMode) {
4429             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
4430         }
4431         int32_t fwk_testPatternData[4];
4432         fwk_testPatternData[0] = testPatternData->r;
4433         fwk_testPatternData[3] = testPatternData->b;
4434         switch (gCamCapability[mCameraId]->color_arrangement) {
4435         case CAM_FILTER_ARRANGEMENT_RGGB:
4436         case CAM_FILTER_ARRANGEMENT_GRBG:
4437             fwk_testPatternData[1] = testPatternData->gr;
4438             fwk_testPatternData[2] = testPatternData->gb;
4439             break;
4440         case CAM_FILTER_ARRANGEMENT_GBRG:
4441         case CAM_FILTER_ARRANGEMENT_BGGR:
4442             fwk_testPatternData[2] = testPatternData->gr;
4443             fwk_testPatternData[1] = testPatternData->gb;
4444             break;
4445         default:
4446             ALOGE("%s: color arrangement %d is not supported", __func__,
4447                 gCamCapability[mCameraId]->color_arrangement);
4448             break;
4449         }
4450         camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
4451     }
4452 
4453     IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
4454         camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
4455     }
4456 
4457     IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
4458         String8 str((const char *)gps_methods);
4459         camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
4460     }
4461 
4462     IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
4463         camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
4464     }
4465 
4466     IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
4467         camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
4468     }
4469 
4470     IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
4471         uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
4472         camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
4473     }
4474 
4475     IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
4476         uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
4477         camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
4478     }
4479 
4480     IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
4481         int32_t fwk_thumb_size[2];
4482         fwk_thumb_size[0] = thumb_size->width;
4483         fwk_thumb_size[1] = thumb_size->height;
4484         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
4485     }
4486 
4487     IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
4488         camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
4489                 privateData,
4490                 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
4491     }
4492 
4493     if (metadata->is_tuning_params_valid) {
4494         uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
4495         uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
4496         metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
4497 
4498 
4499         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
4500                 sizeof(uint32_t));
4501         data += sizeof(uint32_t);
4502 
4503         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
4504                 sizeof(uint32_t));
4505         CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
4506         data += sizeof(uint32_t);
4507 
4508         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
4509                 sizeof(uint32_t));
4510         CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
4511         data += sizeof(uint32_t);
4512 
4513         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
4514                 sizeof(uint32_t));
4515         CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
4516         data += sizeof(uint32_t);
4517 
4518         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
4519                 sizeof(uint32_t));
4520         CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
4521         data += sizeof(uint32_t);
4522 
4523         metadata->tuning_params.tuning_mod3_data_size = 0;
4524         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
4525                 sizeof(uint32_t));
4526         CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
4527         data += sizeof(uint32_t);
4528 
4529         size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
4530                 TUNING_SENSOR_DATA_MAX);
4531         memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
4532                 count);
4533         data += count;
4534 
4535         count = MIN(metadata->tuning_params.tuning_vfe_data_size,
4536                 TUNING_VFE_DATA_MAX);
4537         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
4538                 count);
4539         data += count;
4540 
4541         count = MIN(metadata->tuning_params.tuning_cpp_data_size,
4542                 TUNING_CPP_DATA_MAX);
4543         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
4544                 count);
4545         data += count;
4546 
4547         count = MIN(metadata->tuning_params.tuning_cac_data_size,
4548                 TUNING_CAC_DATA_MAX);
4549         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
4550                 count);
4551         data += count;
4552 
4553         camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
4554                 (int32_t *)(void *)tuning_meta_data_blob,
4555                 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
4556     }
4557 
4558     IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
4559             CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
4560         camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
4561                 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
4562                 NEUTRAL_COL_POINTS);
4563     }
4564 
4565     IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
4566         uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
4567         camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
4568     }
4569 
4570     IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
4571         int32_t aeRegions[REGIONS_TUPLE_COUNT];
4572         // Adjust crop region from sensor output coordinate system to active
4573         // array coordinate system.
4574         mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
4575                 hAeRegions->rect.width, hAeRegions->rect.height);
4576 
4577         convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
4578         camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
4579                 REGIONS_TUPLE_COUNT);
4580         CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4581                 __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
4582                 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
4583                 hAeRegions->rect.height);
4584     }
4585 
4586     IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
4587         uint8_t fwk_afState = (uint8_t) *afState;
4588         camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
4589         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE %u", __func__, *afState);
4590     }
4591 
4592     IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
4593         camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
4594     }
4595 
4596     IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
4597         camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
4598     }
4599 
4600     IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
4601         uint8_t fwk_lensState = *lensState;
4602         camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
4603     }
4604 
4605     IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
4606         /*af regions*/
4607         int32_t afRegions[REGIONS_TUPLE_COUNT];
4608         // Adjust crop region from sensor output coordinate system to active
4609         // array coordinate system.
4610         mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
4611                 hAfRegions->rect.width, hAfRegions->rect.height);
4612 
4613         convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
4614         camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
4615                 REGIONS_TUPLE_COUNT);
4616         CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4617                 __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
4618                 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
4619                 hAfRegions->rect.height);
4620     }
4621 
4622     IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
4623         int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
4624                 *hal_ab_mode);
4625         if (NAME_NOT_FOUND != val) {
4626             uint8_t fwk_ab_mode = (uint8_t)val;
4627             camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
4628         }
4629     }
4630 
4631     IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4632         int val = lookupFwkName(SCENE_MODES_MAP,
4633                 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
4634         if (NAME_NOT_FOUND != val) {
4635             uint8_t fwkBestshotMode = (uint8_t)val;
4636             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
4637             CDBG("%s: Metadata : ANDROID_CONTROL_SCENE_MODE", __func__);
4638         } else {
4639             CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_SCENE_MODE", __func__);
4640         }
4641     }
4642 
4643     IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
4644          uint8_t fwk_mode = (uint8_t) *mode;
4645          camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
4646     }
4647 
4648     /* Constant metadata values to be update*/
4649     uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
4650     camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
4651 
4652     uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4653     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4654 
4655     int32_t hotPixelMap[2];
4656     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
4657 
4658     // CDS
4659     IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
4660         camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
4661     }
4662 
4663     // TNR
4664     IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
4665         uint8_t tnr_enable       = tnr->denoise_enable;
4666         int32_t tnr_process_type = (int32_t)tnr->process_plates;
4667 
4668         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
4669         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
4670     }
4671 
4672     // Reprocess crop data
4673     IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
4674         uint8_t cnt = crop_data->num_of_streams;
4675         if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
4676             // mm-qcamera-daemon only posts crop_data for streams
4677             // not linked to pproc, so the absence of valid crop
4678             // metadata is not necessarily an error case.
4679             CDBG("%s: No valid crop metadata entries", __func__);
4680         } else {
4681             uint32_t reproc_stream_id;
4682             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
4683                 CDBG("%s: No reprocessible stream found, ignore crop data", __func__);
4684             } else {
4685                 int rc = NO_ERROR;
4686                 Vector<int32_t> roi_map;
4687                 int32_t *crop = new int32_t[cnt*4];
4688                 if (NULL == crop) {
4689                    rc = NO_MEMORY;
4690                 }
4691                 if (NO_ERROR == rc) {
4692                     int32_t streams_found = 0;
4693                     for (size_t i = 0; i < cnt; i++) {
4694                         if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
4695                             if (pprocDone) {
4696                                 // HAL already does internal reprocessing,
4697                                 // either via reprocessing before JPEG encoding,
4698                                 // or offline postprocessing for pproc bypass case.
4699                                 crop[0] = 0;
4700                                 crop[1] = 0;
4701                                 crop[2] = mInputStreamInfo.dim.width;
4702                                 crop[3] = mInputStreamInfo.dim.height;
4703                             } else {
4704                                 crop[0] = crop_data->crop_info[i].crop.left;
4705                                 crop[1] = crop_data->crop_info[i].crop.top;
4706                                 crop[2] = crop_data->crop_info[i].crop.width;
4707                                 crop[3] = crop_data->crop_info[i].crop.height;
4708                             }
4709                             roi_map.add(crop_data->crop_info[i].roi_map.left);
4710                             roi_map.add(crop_data->crop_info[i].roi_map.top);
4711                             roi_map.add(crop_data->crop_info[i].roi_map.width);
4712                             roi_map.add(crop_data->crop_info[i].roi_map.height);
4713                             streams_found++;
4714                             CDBG("%s: Adding reprocess crop data for stream %dx%d, %dx%d",
4715                                     __func__,
4716                                     crop[0], crop[1], crop[2], crop[3]);
4717                             CDBG("%s: Adding reprocess crop roi map for stream %dx%d, %dx%d",
4718                                     __func__,
4719                                     crop_data->crop_info[i].roi_map.left,
4720                                     crop_data->crop_info[i].roi_map.top,
4721                                     crop_data->crop_info[i].roi_map.width,
4722                                     crop_data->crop_info[i].roi_map.height);
4723                             break;
4724 
4725                        }
4726                     }
4727                     camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
4728                             &streams_found, 1);
4729                     camMetadata.update(QCAMERA3_CROP_REPROCESS,
4730                             crop, (size_t)(streams_found * 4));
4731                     if (roi_map.array()) {
4732                         camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
4733                                 roi_map.array(), roi_map.size());
4734                     }
4735                }
4736                if (crop) {
4737                    delete [] crop;
4738                }
4739             }
4740         }
4741     }
4742 
4743     IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
4744         int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
4745                 *cacMode);
4746         if (NAME_NOT_FOUND != val) {
4747             uint8_t fwkCacMode = (uint8_t)val;
4748             camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
4749         } else {
4750             ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode);
4751         }
4752     }
4753 
4754     // Post blob of cam_cds_data through vendor tag.
4755     IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
4756         uint8_t cnt = cdsInfo->num_of_streams;
4757         cam_cds_data_t cdsDataOverride;
4758         memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
4759         cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
4760         cdsDataOverride.num_of_streams = 1;
4761         if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
4762             uint32_t reproc_stream_id;
4763             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
4764                 CDBG("%s: No reprocessible stream found, ignore cds data", __func__);
4765             } else {
4766                 for (size_t i = 0; i < cnt; i++) {
4767                     if (cdsInfo->cds_info[i].stream_id ==
4768                             reproc_stream_id) {
4769                         cdsDataOverride.cds_info[0].cds_enable =
4770                                 cdsInfo->cds_info[i].cds_enable;
4771                         break;
4772                     }
4773                 }
4774             }
4775         } else {
4776             CDBG("%s: Invalid stream count %d in CDS_DATA", __func__, cnt);
4777         }
4778         camMetadata.update(QCAMERA3_CDS_INFO,
4779                 (uint8_t *)&cdsDataOverride,
4780                 sizeof(cam_cds_data_t));
4781     }
4782 
4783     // Ldaf calibration data
4784     if (!mLdafCalibExist) {
4785         IF_META_AVAILABLE(uint32_t, ldafCalib,
4786                 CAM_INTF_META_LDAF_EXIF, metadata) {
4787             mLdafCalibExist = true;
4788             mLdafCalib[0] = ldafCalib[0];
4789             mLdafCalib[1] = ldafCalib[1];
4790             CDBG("%s: ldafCalib[0] is %d, ldafCalib[1] is %d", __func__,
4791                     ldafCalib[0], ldafCalib[1]);
4792         }
4793     }
4794 
4795     resultMetadata = camMetadata.release();
4796     return resultMetadata;
4797 }
4798 
4799 /*===========================================================================
4800  * FUNCTION   : saveExifParams
4801  *
 * DESCRIPTION: Cache the 3A/stats EXIF debug parameters delivered in a
 *              metadata callback so they can be attached to a later JPEG.
4803  *
4804  * PARAMETERS :
4805  *   @metadata : metadata information from callback
4806  *
4807  * RETURN     : none
4808  *
4809  *==========================================================================*/
saveExifParams(metadata_buffer_t * metadata)4810 void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
4811 {
4812     IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
4813             CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
4814         mExifParams.ae_debug_params = *ae_exif_debug_params;
4815         mExifParams.ae_debug_params_valid = TRUE;
4816     }
4817     IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
4818             CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
4819         mExifParams.awb_debug_params = *awb_exif_debug_params;
4820         mExifParams.awb_debug_params_valid = TRUE;
4821     }
4822     IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
4823             CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
4824         mExifParams.af_debug_params = *af_exif_debug_params;
4825         mExifParams.af_debug_params_valid = TRUE;
4826     }
4827     IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
4828             CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
4829         mExifParams.asd_debug_params = *asd_exif_debug_params;
4830         mExifParams.asd_debug_params_valid = TRUE;
4831     }
4832     IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
4833             CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
4834         mExifParams.stats_debug_params = *stats_exif_debug_params;
4835         mExifParams.stats_debug_params_valid = TRUE;
4836     }
4837 }
4838 
4839 /*===========================================================================
4840  * FUNCTION   : get3AExifParams
4841  *
 * DESCRIPTION: Return the 3A EXIF debug parameters cached by saveExifParams().
4843  *
4844  * PARAMETERS : none
4845  *
4846  *
4847  * RETURN     : mm_jpeg_exif_params_t
4848  *
4849  *==========================================================================*/
get3AExifParams()4850 mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
4851 {
4852     return mExifParams;
4853 }
4854 
4855 /*===========================================================================
4856  * FUNCTION   : translateCbUrgentMetadataToResultMetadata
4857  *
 * DESCRIPTION: Translate the urgent (partial-result) subset of HAL metadata —
 *              3A states, triggers and AE-mode deduction — into framework tags.
4860  * PARAMETERS :
4861  *   @metadata : metadata information from callback
4862  *
4863  * RETURN     : camera_metadata_t*
4864  *              metadata in a format specified by fwk
4865  *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB search/converged state.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", __func__, *whiteBalanceState);
    }

    // AE precapture trigger echo: both the trigger value and its id are
    // returned so the framework can correlate results to requests.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        CDBG("%s: urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                __func__, aecTrigger->trigger);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d", __func__,
                aecTrigger->trigger_id);
    }

    // AE search/converged/flash-required state.
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE %u", __func__, *ae_state);
    }

    // Focus mode: map the HAL enum to the framework enum; skip the tag if the
    // value has no framework equivalent.
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
        } else {
            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d", __func__,
                    val);
        }
    }

    // AF trigger echo (value + id), analogous to the AE precapture trigger.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        CDBG("%s: urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                __func__, af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d", __func__,
                af_trigger->trigger_id);
    }

    // White balance mode: HAL enum -> framework enum via lookup table.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", __func__, val);
        } else {
            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AWB_MODE", __func__);
        }
    }

    // ANDROID_CONTROL_AE_MODE has no single HAL counterpart; it is deduced
    // from three HAL parameters collected below. Precedence (first match
    // wins): red-eye reduction enabled > explicit flash mode (auto/on) >
    // plain AE on/off.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;       // sentinel: "not reported"
    int32_t flashMode = CAM_FLASH_MODE_MAX;  // sentinel: "not reported"
    int32_t redeye = -1;                     // sentinel: "not reported"
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            ALOGE("%s: Unsupported flash mode %d", __func__, flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the inputs were usable; the tag is intentionally left out.
        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                __func__, redeye, flashMode, aeMode);
    }

    // Ownership of the backing buffer passes to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
4972 
4973 /*===========================================================================
4974  * FUNCTION   : dumpMetadataToFile
4975  *
4976  * DESCRIPTION: Dumps tuning metadata to file system
4977  *
4978  * PARAMETERS :
4979  *   @meta           : tuning metadata
4980  *   @dumpFrameCount : current dump frame count
4981  *   @enabled        : Enable mask
4982  *
4983  *==========================================================================*/
dumpMetadataToFile(tuning_params_t & meta,uint32_t & dumpFrameCount,bool enabled,const char * type,uint32_t frameNumber)4984 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
4985                                                    uint32_t &dumpFrameCount,
4986                                                    bool enabled,
4987                                                    const char *type,
4988                                                    uint32_t frameNumber)
4989 {
4990     uint32_t frm_num = 0;
4991 
4992     //Some sanity checks
4993     if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
4994         ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
4995               __func__,
4996               meta.tuning_sensor_data_size,
4997               TUNING_SENSOR_DATA_MAX);
4998         return;
4999     }
5000 
5001     if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
5002         ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
5003               __func__,
5004               meta.tuning_vfe_data_size,
5005               TUNING_VFE_DATA_MAX);
5006         return;
5007     }
5008 
5009     if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
5010         ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
5011               __func__,
5012               meta.tuning_cpp_data_size,
5013               TUNING_CPP_DATA_MAX);
5014         return;
5015     }
5016 
5017     if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
5018         ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
5019               __func__,
5020               meta.tuning_cac_data_size,
5021               TUNING_CAC_DATA_MAX);
5022         return;
5023     }
5024     //
5025 
5026     if(enabled){
5027         char timeBuf[FILENAME_MAX];
5028         char buf[FILENAME_MAX];
5029         memset(buf, 0, sizeof(buf));
5030         memset(timeBuf, 0, sizeof(timeBuf));
5031         time_t current_time;
5032         struct tm * timeinfo;
5033         time (&current_time);
5034         timeinfo = localtime (&current_time);
5035         if (timeinfo != NULL) {
5036             strftime (timeBuf, sizeof(timeBuf),
5037                     QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
5038         }
5039         String8 filePath(timeBuf);
5040         snprintf(buf,
5041                 sizeof(buf),
5042                 "%dm_%s_%d.bin",
5043                 dumpFrameCount,
5044                 type,
5045                 frameNumber);
5046         filePath.append(buf);
5047         int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
5048         if (file_fd >= 0) {
5049             ssize_t written_len = 0;
5050             meta.tuning_data_version = TUNING_DATA_VERSION;
5051             void *data = (void *)((uint8_t *)&meta.tuning_data_version);
5052             written_len += write(file_fd, data, sizeof(uint32_t));
5053             data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
5054             CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
5055             written_len += write(file_fd, data, sizeof(uint32_t));
5056             data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
5057             CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
5058             written_len += write(file_fd, data, sizeof(uint32_t));
5059             data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
5060             CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
5061             written_len += write(file_fd, data, sizeof(uint32_t));
5062             data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
5063             CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
5064             written_len += write(file_fd, data, sizeof(uint32_t));
5065             meta.tuning_mod3_data_size = 0;
5066             data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
5067             CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
5068             written_len += write(file_fd, data, sizeof(uint32_t));
5069             size_t total_size = meta.tuning_sensor_data_size;
5070             data = (void *)((uint8_t *)&meta.data);
5071             written_len += write(file_fd, data, total_size);
5072             total_size = meta.tuning_vfe_data_size;
5073             data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
5074             written_len += write(file_fd, data, total_size);
5075             total_size = meta.tuning_cpp_data_size;
5076             data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
5077             written_len += write(file_fd, data, total_size);
5078             total_size = meta.tuning_cac_data_size;
5079             data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
5080             written_len += write(file_fd, data, total_size);
5081             close(file_fd);
5082         }else {
5083             ALOGE("%s: fail to open file for metadata dumping", __func__);
5084         }
5085     }
5086 }
5087 
5088 /*===========================================================================
5089  * FUNCTION   : cleanAndSortStreamInfo
5090  *
5091  * DESCRIPTION: helper method to clean up invalid streams in stream_info,
5092  *              and sort them such that raw stream is at the end of the list
5093  *              This is a workaround for camera daemon constraint.
5094  *
5095  * PARAMETERS : None
5096  *
5097  *==========================================================================*/
cleanAndSortStreamInfo()5098 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
5099 {
5100     List<stream_info_t *> newStreamInfo;
5101 
5102     /*clean up invalid streams*/
5103     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
5104             it != mStreamInfo.end();) {
5105         if(((*it)->status) == INVALID){
5106             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
5107             delete channel;
5108             free(*it);
5109             it = mStreamInfo.erase(it);
5110         } else {
5111             it++;
5112         }
5113     }
5114 
5115     // Move preview/video/callback/snapshot streams into newList
5116     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5117             it != mStreamInfo.end();) {
5118         if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
5119                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
5120                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
5121             newStreamInfo.push_back(*it);
5122             it = mStreamInfo.erase(it);
5123         } else
5124             it++;
5125     }
5126     // Move raw streams into newList
5127     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5128             it != mStreamInfo.end();) {
5129         newStreamInfo.push_back(*it);
5130         it = mStreamInfo.erase(it);
5131     }
5132 
5133     mStreamInfo = newStreamInfo;
5134 }
5135 
5136 /*===========================================================================
5137  * FUNCTION   : extractJpegMetadata
5138  *
5139  * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
5140  *              JPEG metadata is cached in HAL, and return as part of capture
5141  *              result when metadata is returned from camera daemon.
5142  *
5143  * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
5144  *              @request:      capture request
5145  *
5146  *==========================================================================*/
extractJpegMetadata(CameraMetadata & jpegMetadata,const camera3_capture_request_t * request)5147 void QCamera3HardwareInterface::extractJpegMetadata(
5148         CameraMetadata& jpegMetadata,
5149         const camera3_capture_request_t *request)
5150 {
5151     CameraMetadata frame_settings;
5152     frame_settings = request->settings;
5153 
5154     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
5155         jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
5156                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
5157                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
5158 
5159     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
5160         jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
5161                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
5162                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
5163 
5164     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
5165         jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
5166                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
5167                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
5168 
5169     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
5170         jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
5171                 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
5172                 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
5173 
5174     if (frame_settings.exists(ANDROID_JPEG_QUALITY))
5175         jpegMetadata.update(ANDROID_JPEG_QUALITY,
5176                 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
5177                 frame_settings.find(ANDROID_JPEG_QUALITY).count);
5178 
5179     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
5180         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
5181                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
5182                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
5183 
5184     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
5185         int32_t thumbnail_size[2];
5186         thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
5187         thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
5188         if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
5189             int32_t orientation =
5190                   frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
5191             if ((orientation == 90) || (orientation == 270)) {
5192                //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
5193                int32_t temp;
5194                temp = thumbnail_size[0];
5195                thumbnail_size[0] = thumbnail_size[1];
5196                thumbnail_size[1] = temp;
5197             }
5198          }
5199          jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
5200                 thumbnail_size,
5201                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
5202     }
5203 
5204 }
5205 
5206 /*===========================================================================
5207  * FUNCTION   : convertToRegions
5208  *
5209  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
5210  *
5211  * PARAMETERS :
5212  *   @rect   : cam_rect_t struct to convert
5213  *   @region : int32_t destination array
5214  *   @weight : if we are converting from cam_area_t, weight is valid
5215  *             else weight = -1
5216  *
5217  *==========================================================================*/
convertToRegions(cam_rect_t rect,int32_t * region,int weight)5218 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
5219         int32_t *region, int weight)
5220 {
5221     region[0] = rect.left;
5222     region[1] = rect.top;
5223     region[2] = rect.left + rect.width;
5224     region[3] = rect.top + rect.height;
5225     if (weight > -1) {
5226         region[4] = weight;
5227     }
5228 }
5229 
5230 /*===========================================================================
5231  * FUNCTION   : convertFromRegions
5232  *
5233  * DESCRIPTION: helper method to convert from array to cam_rect_t
5234  *
5235  * PARAMETERS :
5236  *   @rect   : cam_rect_t struct to convert
5237  *   @region : int32_t destination array
5238  *   @weight : if we are converting from cam_area_t, weight is valid
5239  *             else weight = -1
5240  *
5241  *==========================================================================*/
convertFromRegions(cam_area_t & roi,const camera_metadata_t * settings,uint32_t tag)5242 void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
5243         const camera_metadata_t *settings, uint32_t tag)
5244 {
5245     CameraMetadata frame_settings;
5246     frame_settings = settings;
5247     int32_t x_min = frame_settings.find(tag).data.i32[0];
5248     int32_t y_min = frame_settings.find(tag).data.i32[1];
5249     int32_t x_max = frame_settings.find(tag).data.i32[2];
5250     int32_t y_max = frame_settings.find(tag).data.i32[3];
5251     roi.weight = frame_settings.find(tag).data.i32[4];
5252     roi.rect.left = x_min;
5253     roi.rect.top = y_min;
5254     roi.rect.width = x_max - x_min;
5255     roi.rect.height = y_max - y_min;
5256 }
5257 
5258 /*===========================================================================
5259  * FUNCTION   : resetIfNeededROI
5260  *
5261  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
5262  *              crop region
5263  *
5264  * PARAMETERS :
5265  *   @roi       : cam_area_t struct to resize
5266  *   @scalerCropRegion : cam_crop_region_t region to compare against
5267  *
5268  *
5269  *==========================================================================*/
bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
                                                 const cam_crop_region_t* scalerCropRegion)
{
    // Right/bottom extents of the ROI and of the scaler crop region.
    int32_t roi_x_max = roi->rect.width + roi->rect.left;
    int32_t roi_y_max = roi->rect.height + roi->rect.top;
    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;

    /* According to spec weight = 0 is used to indicate roi needs to be disabled
     * without having this check the calculations below to validate if the roi
     * is inside scalar crop region will fail resulting in the roi not being
     * reset causing algorithm to continue to use stale roi window
     */
    if (roi->weight == 0) {
        return true;
    }

    // Reject an ROI that lies entirely outside the crop region; the caller
    // treats 'false' as "do not program this ROI".
    if ((roi_x_max < scalerCropRegion->left) ||
        // right edge of roi window is left of scaler crop's left edge
        (roi_y_max < scalerCropRegion->top)  ||
        // bottom edge of roi window is above scaler crop's top edge
        (roi->rect.left > crop_x_max) ||
        // left edge of roi window is beyond (right of) scaler crop's right edge
        (roi->rect.top > crop_y_max)){
        // top edge of roi window is below scaler crop's bottom edge
        return false;
    }
    // Otherwise clamp each edge of the (partially overlapping) ROI to the
    // crop region, then rebuild width/height from the clamped extents.
    if (roi->rect.left < scalerCropRegion->left) {
        roi->rect.left = scalerCropRegion->left;
    }
    if (roi->rect.top < scalerCropRegion->top) {
        roi->rect.top = scalerCropRegion->top;
    }
    if (roi_x_max > crop_x_max) {
        roi_x_max = crop_x_max;
    }
    if (roi_y_max > crop_y_max) {
        roi_y_max = crop_y_max;
    }
    roi->rect.width = roi_x_max - roi->rect.left;
    roi->rect.height = roi_y_max - roi->rect.top;
    return true;
}
5313 
5314 /*===========================================================================
5315  * FUNCTION   : convertLandmarks
5316  *
5317  * DESCRIPTION: helper method to extract the landmarks from face detection info
5318  *
5319  * PARAMETERS :
5320  *   @face   : cam_rect_t struct to convert
5321  *   @landmarks : int32_t destination array
5322  *
5323  *
5324  *==========================================================================*/
convertLandmarks(cam_face_detection_info_t face,int32_t * landmarks)5325 void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t *landmarks)
5326 {
5327     landmarks[0] = (int32_t)face.left_eye_center.x;
5328     landmarks[1] = (int32_t)face.left_eye_center.y;
5329     landmarks[2] = (int32_t)face.right_eye_center.x;
5330     landmarks[3] = (int32_t)face.right_eye_center.y;
5331     landmarks[4] = (int32_t)face.mouth_center.x;
5332     landmarks[5] = (int32_t)face.mouth_center.y;
5333 }
5334 
5335 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
5336 /*===========================================================================
5337  * FUNCTION   : initCapabilities
5338  *
5339  * DESCRIPTION: initialize camera capabilities in static data struct
5340  *
5341  * PARAMETERS :
5342  *   @cameraId  : camera Id
5343  *
5344  * RETURN     : int32_t type of status
5345  *              NO_ERROR  -- success
5346  *              none-zero failure code
5347  *==========================================================================*/
initCapabilities(uint32_t cameraId)5348 int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
5349 {
5350     int rc = 0;
5351     mm_camera_vtbl_t *cameraHandle = NULL;
5352     QCamera3HeapMemory *capabilityHeap = NULL;
5353 
5354     rc = camera_open((uint8_t)cameraId, &cameraHandle);
5355     if (rc || !cameraHandle) {
5356         ALOGE("%s: camera_open failed. rc = %d, cameraHandle = %p", __func__, rc, cameraHandle);
5357         goto open_failed;
5358     }
5359 
5360     capabilityHeap = new QCamera3HeapMemory(1);
5361     if (capabilityHeap == NULL) {
5362         ALOGE("%s: creation of capabilityHeap failed", __func__);
5363         goto heap_creation_failed;
5364     }
5365     /* Allocate memory for capability buffer */
5366     rc = capabilityHeap->allocate(sizeof(cam_capability_t));
5367     if(rc != OK) {
5368         ALOGE("%s: No memory for cappability", __func__);
5369         goto allocate_failed;
5370     }
5371 
5372     /* Map memory for capability buffer */
5373     memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
5374     rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
5375                                 CAM_MAPPING_BUF_TYPE_CAPABILITY,
5376                                 capabilityHeap->getFd(0),
5377                                 sizeof(cam_capability_t));
5378     if(rc < 0) {
5379         ALOGE("%s: failed to map capability buffer", __func__);
5380         goto map_failed;
5381     }
5382 
5383     /* Query Capability */
5384     rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
5385     if(rc < 0) {
5386         ALOGE("%s: failed to query capability",__func__);
5387         goto query_failed;
5388     }
5389     gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
5390     if (!gCamCapability[cameraId]) {
5391         ALOGE("%s: out of memory", __func__);
5392         goto query_failed;
5393     }
5394     memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
5395                                         sizeof(cam_capability_t));
5396     rc = 0;
5397 
5398 query_failed:
5399     cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
5400                             CAM_MAPPING_BUF_TYPE_CAPABILITY);
5401 map_failed:
5402     capabilityHeap->deallocate();
5403 allocate_failed:
5404     delete capabilityHeap;
5405 heap_creation_failed:
5406     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
5407     cameraHandle = NULL;
5408 open_failed:
5409     return rc;
5410 }
5411 
5412 /*==========================================================================
5413  * FUNCTION   : get3Aversion
5414  *
5415  * DESCRIPTION: get the Q3A S/W version
5416  *
5417  * PARAMETERS :
5418  *  @sw_version: Reference of Q3A structure which will hold version info upon
5419  *               return
5420  *
5421  * RETURN     : None
5422  *
5423  *==========================================================================*/
get3AVersion(cam_q3a_version_t & sw_version)5424 void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
5425 {
5426     if(gCamCapability[mCameraId])
5427         sw_version = gCamCapability[mCameraId]->q3a_version;
5428     else
5429         ALOGE("%s:Capability structure NULL!", __func__);
5430 }
5431 
5432 
5433 /*===========================================================================
5434  * FUNCTION   : initParameters
5435  *
5436  * DESCRIPTION: initialize camera parameters
5437  *
5438  * PARAMETERS :
5439  *
5440  * RETURN     : int32_t type of status
5441  *              NO_ERROR  -- success
5442  *              none-zero failure code
5443  *==========================================================================*/
initParameters()5444 int QCamera3HardwareInterface::initParameters()
5445 {
5446     int rc = 0;
5447 
5448     //Allocate Set Param Buffer
5449     mParamHeap = new QCamera3HeapMemory(1);
5450     rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
5451     if(rc != OK) {
5452         rc = NO_MEMORY;
5453         ALOGE("Failed to allocate SETPARM Heap memory");
5454         delete mParamHeap;
5455         mParamHeap = NULL;
5456         return rc;
5457     }
5458 
5459     //Map memory for parameters buffer
5460     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
5461             CAM_MAPPING_BUF_TYPE_PARM_BUF,
5462             mParamHeap->getFd(0),
5463             sizeof(metadata_buffer_t));
5464     if(rc < 0) {
5465         ALOGE("%s:failed to map SETPARM buffer",__func__);
5466         rc = FAILED_TRANSACTION;
5467         mParamHeap->deallocate();
5468         delete mParamHeap;
5469         mParamHeap = NULL;
5470         return rc;
5471     }
5472 
5473     mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
5474 
5475     mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
5476     return rc;
5477 }
5478 
5479 /*===========================================================================
5480  * FUNCTION   : deinitParameters
5481  *
5482  * DESCRIPTION: de-initialize camera parameters
5483  *
5484  * PARAMETERS :
5485  *
5486  * RETURN     : NONE
5487  *==========================================================================*/
deinitParameters()5488 void QCamera3HardwareInterface::deinitParameters()
5489 {
5490     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
5491             CAM_MAPPING_BUF_TYPE_PARM_BUF);
5492 
5493     mParamHeap->deallocate();
5494     delete mParamHeap;
5495     mParamHeap = NULL;
5496 
5497     mParameters = NULL;
5498 
5499     free(mPrevParameters);
5500     mPrevParameters = NULL;
5501 }
5502 
5503 /*===========================================================================
5504  * FUNCTION   : calcMaxJpegSize
5505  *
5506  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
5507  *
5508  * PARAMETERS :
5509  *
5510  * RETURN     : max_jpeg_size
5511  *==========================================================================*/
calcMaxJpegSize(uint32_t camera_id)5512 size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
5513 {
5514     size_t max_jpeg_size = 0;
5515     size_t temp_width, temp_height;
5516     size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
5517             MAX_SIZES_CNT);
5518     for (size_t i = 0; i < count; i++) {
5519         temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
5520         temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
5521         if (temp_width * temp_height > max_jpeg_size ) {
5522             max_jpeg_size = temp_width * temp_height;
5523         }
5524     }
5525     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
5526     return max_jpeg_size;
5527 }
5528 
5529 /*===========================================================================
5530  * FUNCTION   : getMaxRawSize
5531  *
5532  * DESCRIPTION: Fetches maximum raw size supported by the cameraId
5533  *
5534  * PARAMETERS :
5535  *
5536  * RETURN     : Largest supported Raw Dimension
5537  *==========================================================================*/
getMaxRawSize(uint32_t camera_id)5538 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
5539 {
5540     int max_width = 0;
5541     cam_dimension_t maxRawSize;
5542 
5543     memset(&maxRawSize, 0, sizeof(cam_dimension_t));
5544     for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
5545         if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
5546             max_width = gCamCapability[camera_id]->raw_dim[i].width;
5547             maxRawSize = gCamCapability[camera_id]->raw_dim[i];
5548         }
5549     }
5550     return maxRawSize;
5551 }
5552 
5553 
5554 /*===========================================================================
5555  * FUNCTION   : calcMaxJpegDim
5556  *
5557  * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
5558  *
5559  * PARAMETERS :
5560  *
5561  * RETURN     : max_jpeg_dim
5562  *==========================================================================*/
calcMaxJpegDim()5563 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
5564 {
5565     cam_dimension_t max_jpeg_dim;
5566     cam_dimension_t curr_jpeg_dim;
5567     max_jpeg_dim.width = 0;
5568     max_jpeg_dim.height = 0;
5569     curr_jpeg_dim.width = 0;
5570     curr_jpeg_dim.height = 0;
5571     for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
5572         curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
5573         curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
5574         if (curr_jpeg_dim.width * curr_jpeg_dim.height >
5575             max_jpeg_dim.width * max_jpeg_dim.height ) {
5576             max_jpeg_dim.width = curr_jpeg_dim.width;
5577             max_jpeg_dim.height = curr_jpeg_dim.height;
5578         }
5579     }
5580     return max_jpeg_dim;
5581 }
5582 
5583 /*===========================================================================
5584  * FUNCTION   : addStreamConfig
5585  *
5586  * DESCRIPTION: adds the stream configuration to the array
5587  *
5588  * PARAMETERS :
5589  * @available_stream_configs : pointer to stream configuration array
5590  * @scalar_format            : scalar format
5591  * @dim                      : configuration dimension
5592  * @config_type              : input or output configuration type
5593  *
5594  * RETURN     : NONE
5595  *==========================================================================*/
addStreamConfig(Vector<int32_t> & available_stream_configs,int32_t scalar_format,const cam_dimension_t & dim,int32_t config_type)5596 void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
5597         int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
5598 {
5599     available_stream_configs.add(scalar_format);
5600     available_stream_configs.add(dim.width);
5601     available_stream_configs.add(dim.height);
5602     available_stream_configs.add(config_type);
5603 }
5604 
5605 
5606 /*===========================================================================
5607  * FUNCTION   : initStaticMetadata
5608  *
5609  * DESCRIPTION: initialize the static metadata
5610  *
5611  * PARAMETERS :
5612  *   @cameraId  : camera Id
5613  *
5614  * RETURN     : int32_t type of status
5615  *              0  -- success
5616  *              non-zero failure code
5617  *==========================================================================*/
initStaticMetadata(uint32_t cameraId)5618 int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
5619 {
5620     int rc = 0;
5621     CameraMetadata staticInfo;
5622     size_t count = 0;
5623     bool limitedDevice = false;
5624     char prop[PROPERTY_VALUE_MAX];
5625 
5626     /* If sensor is YUV sensor (no raw support) or if per-frame control is not
5627      * guaranteed, its advertised as limited device */
5628     limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
5629             (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type);
5630 
5631     uint8_t supportedHwLvl = limitedDevice ?
5632             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
5633             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
5634 
5635     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
5636             &supportedHwLvl, 1);
5637 
5638     bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
5639     /*HAL 3 only*/
5640     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
5641                     &gCamCapability[cameraId]->min_focus_distance, 1);
5642 
5643     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
5644                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
5645 
5646     /*should be using focal lengths but sensor doesn't provide that info now*/
5647     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
5648                       &gCamCapability[cameraId]->focal_length,
5649                       1);
5650 
5651     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
5652                       gCamCapability[cameraId]->apertures,
5653                       gCamCapability[cameraId]->apertures_count);
5654 
5655     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
5656                 gCamCapability[cameraId]->filter_densities,
5657                 gCamCapability[cameraId]->filter_densities_count);
5658 
5659 
5660     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
5661                       (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
5662                       gCamCapability[cameraId]->optical_stab_modes_count);
5663 
5664     int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
5665             gCamCapability[cameraId]->lens_shading_map_size.height};
5666     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
5667                       lens_shading_map_size,
5668                       sizeof(lens_shading_map_size)/sizeof(int32_t));
5669 
5670     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
5671             gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
5672 
5673     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
5674             gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
5675 
5676     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
5677             &gCamCapability[cameraId]->max_frame_duration, 1);
5678 
5679     camera_metadata_rational baseGainFactor = {
5680             gCamCapability[cameraId]->base_gain_factor.numerator,
5681             gCamCapability[cameraId]->base_gain_factor.denominator};
5682     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
5683                       &baseGainFactor, 1);
5684 
5685     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
5686                      (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
5687 
5688     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
5689             gCamCapability[cameraId]->pixel_array_size.height};
5690     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
5691                       pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
5692 
5693     int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
5694                                                 gCamCapability[cameraId]->active_array_size.top,
5695                                                 gCamCapability[cameraId]->active_array_size.width,
5696                                                 gCamCapability[cameraId]->active_array_size.height};
5697     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
5698                       active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
5699 
5700     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
5701             &gCamCapability[cameraId]->white_level, 1);
5702 
5703     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
5704             gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
5705 
5706     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
5707                       &gCamCapability[cameraId]->flash_charge_duration, 1);
5708 
5709     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
5710                       &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
5711 
5712     uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
5713     staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
5714             &timestampSource, 1);
5715 
5716     staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
5717                       &gCamCapability[cameraId]->histogram_size, 1);
5718 
5719     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
5720             &gCamCapability[cameraId]->max_histogram_count, 1);
5721 
5722     int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
5723             gCamCapability[cameraId]->sharpness_map_size.height};
5724 
5725     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
5726             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
5727 
5728     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
5729             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
5730 
5731     int32_t scalar_formats[] = {
5732             ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
5733             ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
5734             ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
5735             ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
5736             HAL_PIXEL_FORMAT_RAW10,
5737             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
5738     size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
5739     staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
5740                       scalar_formats,
5741                       scalar_formats_count);
5742 
5743     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
5744     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
5745     makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
5746             count, MAX_SIZES_CNT, available_processed_sizes);
5747     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
5748             available_processed_sizes, count * 2);
5749 
5750     int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
5751     count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
5752     makeTable(gCamCapability[cameraId]->raw_dim,
5753             count, MAX_SIZES_CNT, available_raw_sizes);
5754     staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
5755             available_raw_sizes, count * 2);
5756 
5757     int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
5758     count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
5759     makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
5760             count, MAX_SIZES_CNT, available_fps_ranges);
5761     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
5762             available_fps_ranges, count * 2);
5763 
5764     camera_metadata_rational exposureCompensationStep = {
5765             gCamCapability[cameraId]->exp_compensation_step.numerator,
5766             gCamCapability[cameraId]->exp_compensation_step.denominator};
5767     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
5768                       &exposureCompensationStep, 1);
5769 
5770     Vector<uint8_t> availableVstabModes;
5771     availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
5772     char eis_prop[PROPERTY_VALUE_MAX];
5773     memset(eis_prop, 0, sizeof(eis_prop));
5774     property_get("persist.camera.eis.enable", eis_prop, "0");
5775     uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
5776     if (facingBack && eis_prop_set) {
5777         availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
5778     }
5779     staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
5780                       availableVstabModes.array(), availableVstabModes.size());
5781 
5782     /*HAL 1 and HAL 3 common*/
5783     float maxZoom = 4;
5784     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
5785             &maxZoom, 1);
5786 
5787     uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
5788     staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
5789 
5790     int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
5791     if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
5792         max3aRegions[2] = 0; /* AF not supported */
5793     staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
5794             max3aRegions, 3);
5795 
5796     /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
5797     memset(prop, 0, sizeof(prop));
5798     property_get("persist.camera.facedetect", prop, "1");
5799     uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
5800     CDBG("%s: Support face detection mode: %d",
5801             __func__, supportedFaceDetectMode);
5802 
5803     int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
5804     Vector<uint8_t> availableFaceDetectModes;
5805     availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
5806     if (supportedFaceDetectMode == 1) {
5807         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
5808     } else if (supportedFaceDetectMode == 2) {
5809         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
5810     } else if (supportedFaceDetectMode == 3) {
5811         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
5812         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
5813     } else {
5814         maxFaces = 0;
5815     }
5816     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
5817             availableFaceDetectModes.array(),
5818             availableFaceDetectModes.size());
5819     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
5820             (int32_t *)&maxFaces, 1);
5821 
5822     int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
5823                                            gCamCapability[cameraId]->exposure_compensation_max};
5824     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
5825             exposureCompensationRange,
5826             sizeof(exposureCompensationRange)/sizeof(int32_t));
5827 
5828     uint8_t lensFacing = (facingBack) ?
5829             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
5830     staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
5831 
5832     staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
5833                       available_thumbnail_sizes,
5834                       sizeof(available_thumbnail_sizes)/sizeof(int32_t));
5835 
5836     /*all sizes will be clubbed into this tag*/
5837     int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
5838     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
5839     size_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes,
5840             count * 2, MAX_SIZES_CNT * 2, gCamCapability[cameraId]->active_array_size,
5841             gCamCapability[cameraId]->max_downscale_factor);
5842     /*android.scaler.availableStreamConfigurations*/
5843     size_t max_stream_configs_size = count * scalar_formats_count * 4;
5844     Vector<int32_t> available_stream_configs;
5845     cam_dimension_t active_array_dim;
5846     active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
5847     active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
5848     /* Add input/output stream configurations for each scalar formats*/
5849     for (size_t j = 0; j < scalar_formats_count; j++) {
5850         switch (scalar_formats[j]) {
5851         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
5852         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
5853         case HAL_PIXEL_FORMAT_RAW10:
5854             for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
5855                 addStreamConfig(available_stream_configs, scalar_formats[j],
5856                         gCamCapability[cameraId]->raw_dim[i],
5857                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
5858             }
5859             break;
5860         case HAL_PIXEL_FORMAT_BLOB:
5861             cam_dimension_t jpeg_size;
5862             for (size_t i = 0; i < jpeg_sizes_cnt/2; i++) {
5863                 jpeg_size.width  = available_jpeg_sizes[i*2];
5864                 jpeg_size.height = available_jpeg_sizes[i*2+1];
5865                 addStreamConfig(available_stream_configs, scalar_formats[j],
5866                         jpeg_size,
5867                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
5868             }
5869             break;
5870         case HAL_PIXEL_FORMAT_YCbCr_420_888:
5871         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
5872         default:
5873             cam_dimension_t largest_picture_size;
5874             memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
5875             for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
5876                 addStreamConfig(available_stream_configs, scalar_formats[j],
5877                         gCamCapability[cameraId]->picture_sizes_tbl[i],
5878                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
5879                 /* Book keep largest */
5880                 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
5881                         >= largest_picture_size.width &&
5882                         gCamCapability[cameraId]->picture_sizes_tbl[i].height
5883                         >= largest_picture_size.height)
5884                     largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
5885             }
5886             /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
5887             if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
5888                     scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5889                  addStreamConfig(available_stream_configs, scalar_formats[j],
5890                          largest_picture_size,
5891                          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
5892             }
5893             break;
5894         }
5895     }
5896 
5897     staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
5898                       available_stream_configs.array(), available_stream_configs.size());
5899     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
5900     staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
5901 
5902     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5903     staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5904 
5905     /* android.scaler.availableMinFrameDurations */
5906     int64_t available_min_durations[max_stream_configs_size];
5907     size_t idx = 0;
5908     for (size_t j = 0; j < scalar_formats_count; j++) {
5909         switch (scalar_formats[j]) {
5910         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
5911         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
5912         case HAL_PIXEL_FORMAT_RAW10:
5913             for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
5914                 available_min_durations[idx] = scalar_formats[j];
5915                 available_min_durations[idx+1] =
5916                     gCamCapability[cameraId]->raw_dim[i].width;
5917                 available_min_durations[idx+2] =
5918                     gCamCapability[cameraId]->raw_dim[i].height;
5919                 available_min_durations[idx+3] =
5920                     gCamCapability[cameraId]->raw_min_duration[i];
5921                 idx+=4;
5922             }
5923             break;
5924         default:
5925             for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
5926                 available_min_durations[idx] = scalar_formats[j];
5927                 available_min_durations[idx+1] =
5928                     gCamCapability[cameraId]->picture_sizes_tbl[i].width;
5929                 available_min_durations[idx+2] =
5930                     gCamCapability[cameraId]->picture_sizes_tbl[i].height;
5931                 available_min_durations[idx+3] =
5932                     gCamCapability[cameraId]->picture_min_duration[i];
5933                 idx+=4;
5934             }
5935             break;
5936         }
5937     }
5938     staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
5939                       &available_min_durations[0], idx);
5940 
5941     Vector<int32_t> available_hfr_configs;
5942     for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
5943         int32_t fps = 0;
5944         switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
5945         case CAM_HFR_MODE_60FPS:
5946             fps = 60;
5947             break;
5948         case CAM_HFR_MODE_90FPS:
5949             fps = 90;
5950             break;
5951         case CAM_HFR_MODE_120FPS:
5952             fps = 120;
5953             break;
5954         case CAM_HFR_MODE_150FPS:
5955             fps = 150;
5956             break;
5957         case CAM_HFR_MODE_180FPS:
5958             fps = 180;
5959             break;
5960         case CAM_HFR_MODE_210FPS:
5961             fps = 210;
5962             break;
5963         case CAM_HFR_MODE_240FPS:
5964             fps = 240;
5965             break;
5966         case CAM_HFR_MODE_480FPS:
5967             fps = 480;
5968             break;
5969         case CAM_HFR_MODE_OFF:
5970         case CAM_HFR_MODE_MAX:
5971         default:
5972             break;
5973         }
5974 
5975         /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
5976         if (fps >= MIN_FPS_FOR_BATCH_MODE) {
5977             /* For each HFR frame rate, need to advertise one variable fps range
5978              * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
5979              * [120, 120]. While camcorder preview alone is running [30, 120] is
5980              * set by the app. When video recording is started, [120, 120] is
5981              * set. This way sensor configuration does not change when recording
5982              * is started */
5983 
5984             /* (width, height, fps_min, fps_max, batch_size_max) */
5985             available_hfr_configs.add(
5986                     gCamCapability[cameraId]->hfr_tbl[i].dim.width);
5987             available_hfr_configs.add(
5988                     gCamCapability[cameraId]->hfr_tbl[i].dim.height);
5989             available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
5990             available_hfr_configs.add(fps);
5991             available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
5992 
5993             /* (width, height, fps_min, fps_max, batch_size_max) */
5994             available_hfr_configs.add(
5995                     gCamCapability[cameraId]->hfr_tbl[i].dim.width);
5996             available_hfr_configs.add(
5997                     gCamCapability[cameraId]->hfr_tbl[i].dim.height);
5998             available_hfr_configs.add(fps);
5999             available_hfr_configs.add(fps);
6000             available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6001        }
6002     }
6003     //Advertise HFR capability only if the property is set
6004     memset(prop, 0, sizeof(prop));
6005     property_get("persist.camera.hal3hfr.enable", prop, "1");
6006     uint8_t hfrEnable = (uint8_t)atoi(prop);
6007 
6008     if(hfrEnable && available_hfr_configs.array()) {
6009         staticInfo.update(
6010                 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
6011                 available_hfr_configs.array(), available_hfr_configs.size());
6012     }
6013 
6014     int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
6015     staticInfo.update(ANDROID_JPEG_MAX_SIZE,
6016                       &max_jpeg_size, 1);
6017 
6018     uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
6019     size_t size = 0;
6020     count = CAM_EFFECT_MODE_MAX;
6021     count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
6022     for (size_t i = 0; i < count; i++) {
6023         int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6024                 gCamCapability[cameraId]->supported_effects[i]);
6025         if (NAME_NOT_FOUND != val) {
6026             avail_effects[size] = (uint8_t)val;
6027             size++;
6028         }
6029     }
6030     staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
6031                       avail_effects,
6032                       size);
6033 
6034     uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
6035     uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
6036     size_t supported_scene_modes_cnt = 0;
6037     count = CAM_SCENE_MODE_MAX;
6038     count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
6039     for (size_t i = 0; i < count; i++) {
6040         if (gCamCapability[cameraId]->supported_scene_modes[i] !=
6041                 CAM_SCENE_MODE_OFF) {
6042             int val = lookupFwkName(SCENE_MODES_MAP,
6043                     METADATA_MAP_SIZE(SCENE_MODES_MAP),
6044                     gCamCapability[cameraId]->supported_scene_modes[i]);
6045             if (NAME_NOT_FOUND != val) {
6046                 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
6047                 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
6048                 supported_scene_modes_cnt++;
6049             }
6050         }
6051     }
6052     staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6053                       avail_scene_modes,
6054                       supported_scene_modes_cnt);
6055 
6056     uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
6057     makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
6058                       supported_scene_modes_cnt,
6059                       CAM_SCENE_MODE_MAX,
6060                       scene_mode_overrides,
6061                       supported_indexes,
6062                       cameraId);
6063 
6064     if (supported_scene_modes_cnt == 0) {
6065         supported_scene_modes_cnt = 1;
6066         avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
6067     }
6068 
6069     staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
6070             scene_mode_overrides, supported_scene_modes_cnt * 3);
6071 
6072     uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
6073                                          ANDROID_CONTROL_MODE_AUTO,
6074                                          ANDROID_CONTROL_MODE_USE_SCENE_MODE};
6075     staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
6076             available_control_modes,
6077             3);
6078 
6079     uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
6080     size = 0;
6081     count = CAM_ANTIBANDING_MODE_MAX;
6082     count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
6083     for (size_t i = 0; i < count; i++) {
6084         int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
6085                 gCamCapability[cameraId]->supported_antibandings[i]);
6086         if (NAME_NOT_FOUND != val) {
6087             avail_antibanding_modes[size] = (uint8_t)val;
6088             size++;
6089         }
6090 
6091     }
6092     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
6093                       avail_antibanding_modes,
6094                       size);
6095 
6096     uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
6097     size = 0;
6098     count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
6099     count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
6100     if (0 == count) {
6101         avail_abberation_modes[0] =
6102                 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
6103         size++;
6104     } else {
6105         for (size_t i = 0; i < count; i++) {
6106             int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6107                     gCamCapability[cameraId]->aberration_modes[i]);
6108             if (NAME_NOT_FOUND != val) {
6109                 avail_abberation_modes[size] = (uint8_t)val;
6110                 size++;
6111             } else {
6112                 ALOGE("%s: Invalid CAC mode %d", __func__,
6113                         gCamCapability[cameraId]->aberration_modes[i]);
6114                 break;
6115             }
6116         }
6117 
6118     }
6119     staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6120             avail_abberation_modes,
6121             size);
6122 
6123     uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
6124     size = 0;
6125     count = CAM_FOCUS_MODE_MAX;
6126     count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
6127     for (size_t i = 0; i < count; i++) {
6128         int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6129                 gCamCapability[cameraId]->supported_focus_modes[i]);
6130         if (NAME_NOT_FOUND != val) {
6131             avail_af_modes[size] = (uint8_t)val;
6132             size++;
6133         }
6134     }
6135     staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
6136                       avail_af_modes,
6137                       size);
6138 
6139     uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
6140     size = 0;
6141     count = CAM_WB_MODE_MAX;
6142     count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
6143     for (size_t i = 0; i < count; i++) {
6144         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6145                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6146                 gCamCapability[cameraId]->supported_white_balances[i]);
6147         if (NAME_NOT_FOUND != val) {
6148             avail_awb_modes[size] = (uint8_t)val;
6149             size++;
6150         }
6151     }
6152     staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
6153                       avail_awb_modes,
6154                       size);
6155 
6156     uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
6157     count = CAM_FLASH_FIRING_LEVEL_MAX;
6158     count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
6159             count);
6160     for (size_t i = 0; i < count; i++) {
6161         available_flash_levels[i] =
6162                 gCamCapability[cameraId]->supported_firing_levels[i];
6163     }
6164     staticInfo.update(ANDROID_FLASH_FIRING_POWER,
6165             available_flash_levels, count);
6166 
6167     uint8_t flashAvailable;
6168     if (gCamCapability[cameraId]->flash_available)
6169         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
6170     else
6171         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
6172     staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
6173             &flashAvailable, 1);
6174 
6175     Vector<uint8_t> avail_ae_modes;
6176     count = CAM_AE_MODE_MAX;
6177     count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
6178     for (size_t i = 0; i < count; i++) {
6179         avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
6180     }
6181     if (flashAvailable) {
6182         avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
6183         avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
6184     }
6185     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
6186                       avail_ae_modes.array(),
6187                       avail_ae_modes.size());
6188 
6189     int32_t sensitivity_range[2];
6190     sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
6191     sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
6192     staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
6193                       sensitivity_range,
6194                       sizeof(sensitivity_range) / sizeof(int32_t));
6195 
6196     staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
6197                       &gCamCapability[cameraId]->max_analog_sensitivity,
6198                       1);
6199 
6200     int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
6201     staticInfo.update(ANDROID_SENSOR_ORIENTATION,
6202                       &sensor_orientation,
6203                       1);
6204 
6205     int32_t max_output_streams[] = {
6206             MAX_STALLING_STREAMS,
6207             MAX_PROCESSED_STREAMS,
6208             MAX_RAW_STREAMS};
6209     staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
6210             max_output_streams,
6211             sizeof(max_output_streams)/sizeof(max_output_streams[0]));
6212 
6213     uint8_t avail_leds = 0;
6214     staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
6215                       &avail_leds, 0);
6216 
6217     uint8_t focus_dist_calibrated;
6218     int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
6219             gCamCapability[cameraId]->focus_dist_calibrated);
6220     if (NAME_NOT_FOUND != val) {
6221         focus_dist_calibrated = (uint8_t)val;
6222         staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
6223                      &focus_dist_calibrated, 1);
6224     }
6225 
6226     int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
6227     size = 0;
6228     count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
6229             MAX_TEST_PATTERN_CNT);
6230     for (size_t i = 0; i < count; i++) {
6231         int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
6232                 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
6233         if (NAME_NOT_FOUND != testpatternMode) {
6234             avail_testpattern_modes[size] = testpatternMode;
6235             size++;
6236         }
6237     }
6238     staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
6239                       avail_testpattern_modes,
6240                       size);
6241 
6242     uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
6243     staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
6244                       &max_pipeline_depth,
6245                       1);
6246 
6247     int32_t partial_result_count = PARTIAL_RESULT_COUNT;
6248     staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
6249                       &partial_result_count,
6250                        1);
6251 
6252     int32_t max_stall_duration = MAX_REPROCESS_STALL;
6253     staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
6254 
6255     Vector<uint8_t> available_capabilities;
6256     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
6257     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
6258     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
6259     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
6260     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
6261     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
6262     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
6263     if (hfrEnable && available_hfr_configs.array()) {
6264         available_capabilities.add(
6265                 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
6266     }
6267 
6268     if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
6269         available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
6270     }
6271     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
6272             available_capabilities.array(),
6273             available_capabilities.size());
6274 
6275     //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR and/or
6276     //BURST_CAPTURE.
6277     uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
6278             ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
6279 
6280     staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
6281             &aeLockAvailable, 1);
6282 
6283     //awbLockAvailable to be set to true if capabilities has
6284     //MANUAL_POST_PROCESSING and/or BURST_CAPTURE.
6285     uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
6286             ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
6287 
6288     staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
6289             &awbLockAvailable, 1);
6290 
6291     int32_t max_input_streams = 1;
6292     staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
6293                       &max_input_streams,
6294                       1);
6295 
6296     /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
6297     int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
6298             HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
6299             HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
6300             HAL_PIXEL_FORMAT_YCbCr_420_888};
6301     staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
6302                       io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
6303 
6304     int32_t max_latency = (limitedDevice) ?
6305             CAM_MAX_SYNC_LATENCY : ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
6306     staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
6307                       &max_latency,
6308                       1);
6309 
6310     uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
6311                                            ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
6312     staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
6313             available_hot_pixel_modes,
6314             sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
6315 
6316     uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
6317                                          ANDROID_SHADING_MODE_FAST,
6318                                          ANDROID_SHADING_MODE_HIGH_QUALITY};
6319     staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
6320                       available_shading_modes,
6321                       3);
6322 
6323     uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
6324                                                   ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
6325     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
6326                       available_lens_shading_map_modes,
6327                       2);
6328 
6329     uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
6330                                       ANDROID_EDGE_MODE_FAST,
6331                                       ANDROID_EDGE_MODE_HIGH_QUALITY,
6332                                       ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
6333     staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
6334             available_edge_modes,
6335             sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
6336 
6337     uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
6338                                            ANDROID_NOISE_REDUCTION_MODE_FAST,
6339                                            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
6340                                            ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
6341                                            ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
6342     staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
6343             available_noise_red_modes,
6344             sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
6345 
6346     uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
6347                                          ANDROID_TONEMAP_MODE_FAST,
6348                                          ANDROID_TONEMAP_MODE_HIGH_QUALITY};
6349     staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
6350             available_tonemap_modes,
6351             sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
6352 
6353     uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
6354     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
6355             available_hot_pixel_map_modes,
6356             sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
6357 
6358     val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
6359             gCamCapability[cameraId]->reference_illuminant1);
6360     if (NAME_NOT_FOUND != val) {
6361         uint8_t fwkReferenceIlluminant = (uint8_t)val;
6362         staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
6363     }
6364 
6365     val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
6366             gCamCapability[cameraId]->reference_illuminant2);
6367     if (NAME_NOT_FOUND != val) {
6368         uint8_t fwkReferenceIlluminant = (uint8_t)val;
6369         staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
6370     }
6371 
6372     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
6373             (void *)gCamCapability[cameraId]->forward_matrix1,
6374             FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
6375 
6376     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
6377             (void *)gCamCapability[cameraId]->forward_matrix2,
6378             FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
6379 
6380     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
6381             (void *)gCamCapability[cameraId]->color_transform1,
6382             COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
6383 
6384     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
6385             (void *)gCamCapability[cameraId]->color_transform2,
6386             COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
6387 
6388     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
6389             (void *)gCamCapability[cameraId]->calibration_transform1,
6390             CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
6391 
6392     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
6393             (void *)gCamCapability[cameraId]->calibration_transform2,
6394             CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
6395 
6396     int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
6397        ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
6398        ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
6399        ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
6400        ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
6401        ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6402        ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
6403        ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
6404        ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
6405        ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
6406        ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
6407        ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
6408        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
6409        ANDROID_JPEG_GPS_COORDINATES,
6410        ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
6411        ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
6412        ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
6413        ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
6414        ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
6415        ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
6416        ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
6417        ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
6418        ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
6419        ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
6420        ANDROID_STATISTICS_FACE_DETECT_MODE,
6421        ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
6422        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
6423        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
6424        ANDROID_BLACK_LEVEL_LOCK };
6425 
6426     size_t request_keys_cnt =
6427             sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
6428     Vector<int32_t> available_request_keys;
6429     available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
6430     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
6431         available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
6432     }
6433 
6434     staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
6435             available_request_keys.array(), available_request_keys.size());
6436 
6437     int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
6438        ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
6439        ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
6440        ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
6441        ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
6442        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
6443        ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
6444        ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
6445        ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
6446        ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
6447        ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
6448        ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
6449        ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
6450        ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
6451        ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6452        ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
6453        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
6454        ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
6455        ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
6456        ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6457        ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
6458        ANDROID_STATISTICS_FACE_SCORES};
6459     size_t result_keys_cnt =
6460             sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
6461 
6462     Vector<int32_t> available_result_keys;
6463     available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
6464     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
6465         available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
6466     }
6467     if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
6468        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
6469        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
6470     }
6471     if (supportedFaceDetectMode == 1) {
6472         available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
6473         available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
6474     } else if ((supportedFaceDetectMode == 2) ||
6475             (supportedFaceDetectMode == 3)) {
6476         available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
6477         available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
6478     }
6479     staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
6480             available_result_keys.array(), available_result_keys.size());
6481 
6482     int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
6483        ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6484        ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
6485        ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
6486        ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6487        ANDROID_SCALER_CROPPING_TYPE,
6488        ANDROID_SYNC_MAX_LATENCY,
6489        ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6490        ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6491        ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6492        ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
6493        ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
6494        ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6495        ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6496        ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6497        ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6498        ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6499        ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6500        ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
6501        ANDROID_LENS_FACING,
6502        ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
6503        ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
6504        ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
6505        ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
6506        ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6507        ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
6508        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
6509        /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
6510        ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
6511        ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
6512        ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
6513        ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
6514        ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6515        ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6516        ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6517        ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6518        ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
6519        ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
6520        ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
6521        ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6522        ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6523        ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6524        ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6525        ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
6526        ANDROID_EDGE_AVAILABLE_EDGE_MODES,
6527        ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
6528        ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
6529        ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
6530        ANDROID_TONEMAP_MAX_CURVE_POINTS,
6531        ANDROID_CONTROL_AVAILABLE_MODES,
6532        ANDROID_CONTROL_AE_LOCK_AVAILABLE,
6533        ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
6534        ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
6535        ANDROID_SHADING_AVAILABLE_MODES,
6536        ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
6537     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
6538                       available_characteristics_keys,
6539                       sizeof(available_characteristics_keys)/sizeof(int32_t));
6540 
6541     /*available stall durations depend on the hw + sw and will be different for different devices */
6542     /*have to add for raw after implementation*/
6543     int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
6544     size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
6545 
6546     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6547     size_t raw_count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt,
6548             MAX_SIZES_CNT);
6549     size_t available_stall_size = count * 4;
6550     int64_t available_stall_durations[available_stall_size];
6551     idx = 0;
6552     for (uint32_t j = 0; j < stall_formats_count; j++) {
6553        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
6554           for (uint32_t i = 0; i < count; i++) {
6555              available_stall_durations[idx]   = stall_formats[j];
6556              available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
6557              available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
6558              available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
6559              idx+=4;
6560           }
6561        } else {
6562           for (uint32_t i = 0; i < raw_count; i++) {
6563              available_stall_durations[idx]   = stall_formats[j];
6564              available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
6565              available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
6566              available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
6567              idx+=4;
6568           }
6569        }
6570     }
6571     staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
6572                       available_stall_durations,
6573                       idx);
6574     //QCAMERA3_OPAQUE_RAW
6575     uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6576     cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6577     switch (gCamCapability[cameraId]->opaque_raw_fmt) {
6578     case LEGACY_RAW:
6579         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6580             fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
6581         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6582             fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6583         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6584             fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
6585         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6586         break;
6587     case MIPI_RAW:
6588         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6589             fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
6590         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6591             fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
6592         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6593             fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
6594         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
6595         break;
6596     default:
6597         ALOGE("%s: unknown opaque_raw_format %d", __func__,
6598                 gCamCapability[cameraId]->opaque_raw_fmt);
6599         break;
6600     }
6601     staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
6602 
6603     int32_t strides[3*raw_count];
6604     for (size_t i = 0; i < raw_count; i++) {
6605         cam_stream_buf_plane_info_t buf_planes;
6606         strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
6607         strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
6608         mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
6609             &gCamCapability[cameraId]->padding_info, &buf_planes);
6610         strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
6611     }
6612     staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
6613             3*raw_count);
6614 
6615     gStaticMetadata[cameraId] = staticInfo.release();
6616     return rc;
6617 }
6618 
/*===========================================================================
 * FUNCTION   : makeTable
 *
 * DESCRIPTION: flatten a table of dimensions into an int32 array of
 *              (width, height) pairs
 *
 * PARAMETERS :
 *   @dimTable  : source array of cam_dimension_t entries
 *   @size      : number of entries in dimTable
 *   @max_size  : maximum number of entries to copy
 *   @sizeTable : output array; must hold at least 2 * MIN(size, max_size)
 *                int32_t values
 *
 *==========================================================================*/
makeTable(cam_dimension_t * dimTable,size_t size,size_t max_size,int32_t * sizeTable)6628 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
6629         size_t max_size, int32_t *sizeTable)
6630 {
6631     size_t j = 0;
6632     if (size > max_size) {
6633        size = max_size;
6634     }
6635     for (size_t i = 0; i < size; i++) {
6636         sizeTable[j] = dimTable[i].width;
6637         sizeTable[j+1] = dimTable[i].height;
6638         j+=2;
6639     }
6640 }
6641 
/*===========================================================================
 * FUNCTION   : makeFPSTable
 *
 * DESCRIPTION: flatten a table of fps ranges into an int32 array of
 *              (min_fps, max_fps) pairs
 *
 * PARAMETERS :
 *   @fpsTable       : source array of cam_fps_range_t entries
 *   @size           : number of entries in fpsTable
 *   @max_size       : maximum number of entries to copy
 *   @fpsRangesTable : output array; must hold at least
 *                     2 * MIN(size, max_size) int32_t values
 *==========================================================================*/
makeFPSTable(cam_fps_range_t * fpsTable,size_t size,size_t max_size,int32_t * fpsRangesTable)6650 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
6651         size_t max_size, int32_t *fpsRangesTable)
6652 {
6653     size_t j = 0;
6654     if (size > max_size) {
6655        size = max_size;
6656     }
6657     for (size_t i = 0; i < size; i++) {
6658         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
6659         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
6660         j+=2;
6661     }
6662 }
6663 
/*===========================================================================
 * FUNCTION   : makeOverridesList
 *
 * DESCRIPTION: make a list of scene mode overrides (AE/AWB/AF triplets) for
 *              the scene modes supported by the framework
 *
 * PARAMETERS :
 *   @overridesTable    : per-scene-mode overrides reported by the daemon
 *   @size              : number of framework-supported scene modes
 *   @max_size          : maximum number of scene modes to emit
 *   @overridesList     : output array of 3 * MIN(size, max_size) uint8_t
 *                        entries
 *   @supported_indexes : for each output triplet, the index into
 *                        overridesTable
 *   @camera_id         : camera whose capabilities are consulted
 *
 *==========================================================================*/
makeOverridesList(cam_scene_mode_overrides_t * overridesTable,size_t size,size_t max_size,uint8_t * overridesList,uint8_t * supported_indexes,uint32_t camera_id)6673 void QCamera3HardwareInterface::makeOverridesList(
6674         cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
6675         uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
6676 {
6677     /*daemon will give a list of overrides for all scene modes.
6678       However we should send the fwk only the overrides for the scene modes
6679       supported by the framework*/
6680     size_t j = 0;
6681     if (size > max_size) {
6682        size = max_size;
6683     }
6684     size_t focus_count = CAM_FOCUS_MODE_MAX;
6685     focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
6686             focus_count);
6687     for (size_t i = 0; i < size; i++) {
6688         bool supt = false;
6689         size_t index = supported_indexes[i];
6690         overridesList[j] = gCamCapability[camera_id]->flash_available ?
6691                 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
6692         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6693                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6694                 overridesTable[index].awb_mode);
6695         if (NAME_NOT_FOUND != val) {
6696             overridesList[j+1] = (uint8_t)val;
6697         }
6698         uint8_t focus_override = overridesTable[index].af_mode;
6699         for (size_t k = 0; k < focus_count; k++) {
6700            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
6701               supt = true;
6702               break;
6703            }
6704         }
6705         if (supt) {
6706             val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6707                     focus_override);
6708             if (NAME_NOT_FOUND != val) {
6709                 overridesList[j+2] = (uint8_t)val;
6710             }
6711         } else {
6712            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
6713         }
6714         j+=3;
6715     }
6716 }
6717 
6718 /*===========================================================================
6719  * FUNCTION   : filterJpegSizes
6720  *
6721  * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
6722  *              could be downscaled to
6723  *
6724  * PARAMETERS :
6725  *
6726  * RETURN     : length of jpegSizes array
6727  *==========================================================================*/
6728 
filterJpegSizes(int32_t * jpegSizes,int32_t * processedSizes,size_t processedSizesCnt,size_t maxCount,cam_rect_t active_array_size,uint8_t downscale_factor)6729 size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
6730         size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
6731         uint8_t downscale_factor)
6732 {
6733     if (0 == downscale_factor) {
6734         downscale_factor = 1;
6735     }
6736 
6737     int32_t min_width = active_array_size.width / downscale_factor;
6738     int32_t min_height = active_array_size.height / downscale_factor;
6739     size_t jpegSizesCnt = 0;
6740     if (processedSizesCnt > maxCount) {
6741         processedSizesCnt = maxCount;
6742     }
6743     for (size_t i = 0; i < processedSizesCnt; i+=2) {
6744         if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
6745             jpegSizes[jpegSizesCnt] = processedSizes[i];
6746             jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
6747             jpegSizesCnt += 2;
6748         }
6749     }
6750     return jpegSizesCnt;
6751 }
6752 
/*===========================================================================
 * FUNCTION   : getScalarFormat
 *
 * DESCRIPTION: convert the format to type recognized by framework
 *
 * PARAMETERS : format : the format from backend
 *
 * RETURN     : format recognized by framework
 *
 *==========================================================================*/
getScalarFormat(int32_t format)6763 int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
6764 {
6765     int32_t halPixelFormat;
6766 
6767     switch (format) {
6768     case CAM_FORMAT_YUV_420_NV12:
6769         halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
6770         break;
6771     case CAM_FORMAT_YUV_420_NV21:
6772         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
6773         break;
6774     case CAM_FORMAT_YUV_420_NV21_ADRENO:
6775         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
6776         break;
6777     case CAM_FORMAT_YUV_420_YV12:
6778         halPixelFormat = HAL_PIXEL_FORMAT_YV12;
6779         break;
6780     case CAM_FORMAT_YUV_422_NV16:
6781     case CAM_FORMAT_YUV_422_NV61:
6782     default:
6783         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
6784         break;
6785     }
6786     return halPixelFormat;
6787 }
6788 
6789 /*===========================================================================
6790  * FUNCTION   : computeNoiseModelEntryS
6791  *
6792  * DESCRIPTION: function to map a given sensitivity to the S noise
6793  *              model parameters in the DNG noise model.
6794  *
6795  * PARAMETERS : sens : the sensor sensitivity
6796  *
 * RETURN     : S (sensor amplification) noise
6798  *
6799  *==========================================================================*/
computeNoiseModelEntryS(int32_t sens)6800 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
6801     double s = gCamCapability[mCameraId]->gradient_S * sens +
6802             gCamCapability[mCameraId]->offset_S;
6803     return ((s < 0.0) ? 0.0 : s);
6804 }
6805 
6806 /*===========================================================================
6807  * FUNCTION   : computeNoiseModelEntryO
6808  *
6809  * DESCRIPTION: function to map a given sensitivity to the O noise
6810  *              model parameters in the DNG noise model.
6811  *
6812  * PARAMETERS : sens : the sensor sensitivity
6813  *
 * RETURN     : O (sensor readout) noise
6815  *
6816  *==========================================================================*/
computeNoiseModelEntryO(int32_t sens)6817 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
6818     int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
6819     double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
6820             1.0 : (1.0 * sens / max_analog_sens);
6821     double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
6822             gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
6823     return ((o < 0.0) ? 0.0 : o);
6824 }
6825 
6826 /*===========================================================================
6827  * FUNCTION   : getSensorSensitivity
6828  *
6829  * DESCRIPTION: convert iso_mode to an integer value
6830  *
6831  * PARAMETERS : iso_mode : the iso_mode supported by sensor
6832  *
 * RETURN     : sensitivity supported by sensor
6834  *
6835  *==========================================================================*/
getSensorSensitivity(int32_t iso_mode)6836 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
6837 {
6838     int32_t sensitivity;
6839 
6840     switch (iso_mode) {
6841     case CAM_ISO_MODE_100:
6842         sensitivity = 100;
6843         break;
6844     case CAM_ISO_MODE_200:
6845         sensitivity = 200;
6846         break;
6847     case CAM_ISO_MODE_400:
6848         sensitivity = 400;
6849         break;
6850     case CAM_ISO_MODE_800:
6851         sensitivity = 800;
6852         break;
6853     case CAM_ISO_MODE_1600:
6854         sensitivity = 1600;
6855         break;
6856     default:
6857         sensitivity = -1;
6858         break;
6859     }
6860     return sensitivity;
6861 }
6862 
6863 /*===========================================================================
6864  * FUNCTION   : getCamInfo
6865  *
6866  * DESCRIPTION: query camera capabilities
6867  *
6868  * PARAMETERS :
6869  *   @cameraId  : camera Id
6870  *   @info      : camera info struct to be filled in with camera capabilities
6871  *
6872  * RETURN     : int type of status
6873  *              NO_ERROR  -- success
6874  *              none-zero failure code
6875  *==========================================================================*/
getCamInfo(uint32_t cameraId,struct camera_info * info)6876 int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
6877         struct camera_info *info)
6878 {
6879     ATRACE_CALL();
6880     int rc = 0;
6881 
6882     pthread_mutex_lock(&gCamLock);
6883     if (NULL == gCamCapability[cameraId]) {
6884         rc = initCapabilities(cameraId);
6885         if (rc < 0) {
6886             pthread_mutex_unlock(&gCamLock);
6887             return rc;
6888         }
6889     }
6890 
6891     if (NULL == gStaticMetadata[cameraId]) {
6892         rc = initStaticMetadata(cameraId);
6893         if (rc < 0) {
6894             pthread_mutex_unlock(&gCamLock);
6895             return rc;
6896         }
6897     }
6898 
6899     switch(gCamCapability[cameraId]->position) {
6900     case CAM_POSITION_BACK:
6901         info->facing = CAMERA_FACING_BACK;
6902         break;
6903 
6904     case CAM_POSITION_FRONT:
6905         info->facing = CAMERA_FACING_FRONT;
6906         break;
6907 
6908     default:
6909         ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
6910         rc = -1;
6911         break;
6912     }
6913 
6914 
6915     info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
6916     info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
6917     info->static_camera_characteristics = gStaticMetadata[cameraId];
6918 
6919     //For now assume both cameras can operate independently.
6920     info->conflicting_devices = NULL;
6921     info->conflicting_devices_length = 0;
6922 
6923     //resource cost is 100 * MIN(1.0, m/M),
6924     //where m is throughput requirement with maximum stream configuration
6925     //and M is CPP maximum throughput.
6926     float max_fps = 0.0;
6927     for (uint32_t i = 0;
6928             i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
6929         if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
6930             max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
6931     }
6932     float ratio = 1.0 * MAX_PROCESSED_STREAMS *
6933             gCamCapability[cameraId]->active_array_size.width *
6934             gCamCapability[cameraId]->active_array_size.height * max_fps /
6935             gCamCapability[cameraId]->max_pixel_bandwidth;
6936     info->resource_cost = 100 * MIN(1.0, ratio);
6937     ALOGI("%s: camera %d resource cost is %d", __func__, cameraId,
6938             info->resource_cost);
6939 
6940     pthread_mutex_unlock(&gCamLock);
6941     return rc;
6942 }
6943 
6944 /*===========================================================================
6945  * FUNCTION   : translateCapabilityToMetadata
6946  *
6947  * DESCRIPTION: translate the capability into camera_metadata_t
6948  *
6949  * PARAMETERS : type of the request
6950  *
6951  *
6952  * RETURN     : success: camera_metadata_t*
6953  *              failure: NULL
6954  *
6955  *==========================================================================*/
translateCapabilityToMetadata(int type)6956 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
6957 {
6958     if (mDefaultMetadata[type] != NULL) {
6959         return mDefaultMetadata[type];
6960     }
6961     //first time we are handling this request
6962     //fill up the metadata structure using the wrapper class
6963     CameraMetadata settings;
6964     //translate from cam_capability_t to camera_metadata_tag_t
6965     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
6966     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
6967     int32_t defaultRequestID = 0;
6968     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
6969 
6970     /* OIS disable */
6971     char ois_prop[PROPERTY_VALUE_MAX];
6972     memset(ois_prop, 0, sizeof(ois_prop));
6973     property_get("persist.camera.ois.disable", ois_prop, "0");
6974     uint8_t ois_disable = (uint8_t)atoi(ois_prop);
6975 
6976     /* Force video to use OIS */
6977     char videoOisProp[PROPERTY_VALUE_MAX];
6978     memset(videoOisProp, 0, sizeof(videoOisProp));
6979     property_get("persist.camera.ois.video", videoOisProp, "1");
6980     uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
6981 
6982     // EIS enable/disable
6983     char eis_prop[PROPERTY_VALUE_MAX];
6984     memset(eis_prop, 0, sizeof(eis_prop));
6985     property_get("persist.camera.eis.enable", eis_prop, "0");
6986     const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
6987 
6988     const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
6989     // This is a bit hacky. EIS is enabled only when the above setprop
6990     // is set to non-zero value and on back camera (for 2015 Nexus).
6991     // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
6992     // configureStream is called before this function. In other words,
6993     // we cannot guarantee the app will call configureStream before
6994     // calling createDefaultRequest.
6995     const bool eisEnabled = facingBack && eis_prop_set;
6996 
6997     uint8_t controlIntent = 0;
6998     uint8_t focusMode;
6999     uint8_t vsMode;
7000     uint8_t optStabMode;
7001     uint8_t cacMode;
7002     uint8_t edge_mode;
7003     uint8_t noise_red_mode;
7004     uint8_t tonemap_mode;
7005     vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7006     optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7007     switch (type) {
7008       case CAMERA3_TEMPLATE_PREVIEW:
7009         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
7010         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7011         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7012         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7013         edge_mode = ANDROID_EDGE_MODE_FAST;
7014         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7015         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7016         break;
7017       case CAMERA3_TEMPLATE_STILL_CAPTURE:
7018         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
7019         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7020         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7021         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
7022         edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
7023         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
7024         tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
7025         break;
7026       case CAMERA3_TEMPLATE_VIDEO_RECORD:
7027         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
7028         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7029         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7030         if (eisEnabled) {
7031             vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7032         }
7033         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7034         edge_mode = ANDROID_EDGE_MODE_FAST;
7035         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7036         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7037         if (forceVideoOis)
7038             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7039         break;
7040       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7041         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
7042         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7043         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7044         if (eisEnabled) {
7045             vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7046         }
7047         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7048         edge_mode = ANDROID_EDGE_MODE_FAST;
7049         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7050         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7051         if (forceVideoOis)
7052             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7053         break;
7054       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
7055         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
7056         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7057         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7058         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7059         edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
7060         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
7061         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7062         break;
7063       case CAMERA3_TEMPLATE_MANUAL:
7064         edge_mode = ANDROID_EDGE_MODE_FAST;
7065         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7066         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7067         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7068         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
7069         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7070         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7071         break;
7072       default:
7073         edge_mode = ANDROID_EDGE_MODE_FAST;
7074         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7075         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7076         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7077         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
7078         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7079         break;
7080     }
7081     settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
7082     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
7083     settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
7084     if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
7085         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7086     }
7087     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
7088 
7089     if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7090             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
7091         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7092     else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7093             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
7094             || ois_disable)
7095         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7096     settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
7097 
7098     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7099             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
7100 
7101     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
7102     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
7103 
7104     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
7105     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
7106 
7107     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
7108     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
7109 
7110     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
7111     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
7112 
7113     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
7114     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
7115 
7116     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
7117     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
7118 
7119     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
7120     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
7121 
7122     /*flash*/
7123     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
7124     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
7125 
7126     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
7127     settings.update(ANDROID_FLASH_FIRING_POWER,
7128             &flashFiringLevel, 1);
7129 
7130     /* lens */
7131     float default_aperture = gCamCapability[mCameraId]->apertures[0];
7132     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
7133 
7134     if (gCamCapability[mCameraId]->filter_densities_count) {
7135         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
7136         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
7137                         gCamCapability[mCameraId]->filter_densities_count);
7138     }
7139 
7140     float default_focal_length = gCamCapability[mCameraId]->focal_length;
7141     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
7142 
7143     float default_focus_distance = 0;
7144     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
7145 
7146     static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
7147     settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
7148 
7149     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7150     settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7151 
7152     static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
7153     settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
7154 
7155     /* face detection (default to OFF) */
7156     static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
7157     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
7158 
7159     static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
7160     settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
7161 
7162     static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
7163     settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
7164 
7165     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7166     settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7167 
7168     static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7169     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
7170 
7171     static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7172     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
7173 
7174     /* Exposure time(Update the Min Exposure Time)*/
7175     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
7176     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
7177 
7178     /* frame duration */
7179     static const int64_t default_frame_duration = NSEC_PER_33MSEC;
7180     settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
7181 
7182     /* sensitivity */
7183     static const int32_t default_sensitivity = 100;
7184     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
7185 
7186     /*edge mode*/
7187     settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
7188 
7189     /*noise reduction mode*/
7190     settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
7191 
7192     /*color correction mode*/
7193     static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
7194     settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
7195 
7196     /*transform matrix mode*/
7197     settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
7198 
7199     int32_t scaler_crop_region[4];
7200     scaler_crop_region[0] = 0;
7201     scaler_crop_region[1] = 0;
7202     scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
7203     scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
7204     settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
7205 
7206     static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
7207     settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
7208 
7209     /*focus distance*/
7210     float focus_distance = 0.0;
7211     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
7212 
7213     /*target fps range: use maximum range for picture, and maximum fixed range for video*/
7214     float max_range = 0.0;
7215     float max_fixed_fps = 0.0;
7216     int32_t fps_range[2] = {0, 0};
7217     for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
7218             i++) {
7219         float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
7220             gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7221         if (type == CAMERA3_TEMPLATE_PREVIEW ||
7222                 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
7223                 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
7224             if (range > max_range) {
7225                 fps_range[0] =
7226                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7227                 fps_range[1] =
7228                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7229                 max_range = range;
7230             }
7231         } else {
7232             if (range < 0.01 && max_fixed_fps <
7233                     gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
7234                 fps_range[0] =
7235                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7236                 fps_range[1] =
7237                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7238                 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7239             }
7240         }
7241     }
7242     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
7243 
7244     /*precapture trigger*/
7245     uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
7246     settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
7247 
7248     /*af trigger*/
7249     uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
7250     settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
7251 
7252     /* ae & af regions */
7253     int32_t active_region[] = {
7254             gCamCapability[mCameraId]->active_array_size.left,
7255             gCamCapability[mCameraId]->active_array_size.top,
7256             gCamCapability[mCameraId]->active_array_size.left +
7257                     gCamCapability[mCameraId]->active_array_size.width,
7258             gCamCapability[mCameraId]->active_array_size.top +
7259                     gCamCapability[mCameraId]->active_array_size.height,
7260             0};
7261     settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
7262             sizeof(active_region) / sizeof(active_region[0]));
7263     settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
7264             sizeof(active_region) / sizeof(active_region[0]));
7265 
7266     /* black level lock */
7267     uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7268     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
7269 
7270     /* lens shading map mode */
7271     uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7272     if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
7273         shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
7274     }
7275     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
7276 
7277     //special defaults for manual template
7278     if (type == CAMERA3_TEMPLATE_MANUAL) {
7279         static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
7280         settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
7281 
7282         static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
7283         settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
7284 
7285         static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
7286         settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
7287 
7288         static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
7289         settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
7290 
7291         static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
7292         settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
7293 
7294         static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
7295         settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
7296     }
7297 
7298 
7299     /* TNR
7300      * We'll use this location to determine which modes TNR will be set.
7301      * We will enable TNR to be on if either of the Preview/Video stream requires TNR
7302      * This is not to be confused with linking on a per stream basis that decision
7303      * is still on per-session basis and will be handled as part of config stream
7304      */
7305     uint8_t tnr_enable = 0;
7306 
7307     if (m_bTnrPreview || m_bTnrVideo) {
7308 
7309         switch (type) {
7310             case CAMERA3_TEMPLATE_VIDEO_RECORD:
7311             case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7312                     tnr_enable = 1;
7313                     break;
7314 
7315             default:
7316                     tnr_enable = 0;
7317                     break;
7318         }
7319 
7320         int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
7321         settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7322         settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7323 
7324         CDBG("%s: TNR:%d with process plate %d for template:%d",
7325                             __func__, tnr_enable, tnr_process_type, type);
7326     }
7327 
7328     /* CDS default */
7329     char prop[PROPERTY_VALUE_MAX];
7330     memset(prop, 0, sizeof(prop));
7331     property_get("persist.camera.CDS", prop, "Auto");
7332     cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
7333     cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
7334     if (CAM_CDS_MODE_MAX == cds_mode) {
7335         cds_mode = CAM_CDS_MODE_AUTO;
7336     }
7337     m_CdsPreference = cds_mode;
7338 
7339     /* Disabling CDS in templates which have TNR enabled*/
7340     if (tnr_enable)
7341         cds_mode = CAM_CDS_MODE_OFF;
7342 
7343     int32_t mode = cds_mode;
7344     settings.update(QCAMERA3_CDS_MODE, &mode, 1);
7345     mDefaultMetadata[type] = settings.release();
7346 
7347     return mDefaultMetadata[type];
7348 }
7349 
7350 /*===========================================================================
7351  * FUNCTION   : setFrameParameters
7352  *
7353  * DESCRIPTION: set parameters per frame as requested in the metadata from
7354  *              framework
7355  *
7356  * PARAMETERS :
7357  *   @request   : request that needs to be serviced
7358  *   @streamID : Stream ID of all the requested streams
7359  *   @blob_request: Whether this request is a blob request or not
7360  *
7361  * RETURN     : success: NO_ERROR
7362  *              failure:
7363  *==========================================================================*/
setFrameParameters(camera3_capture_request_t * request,cam_stream_ID_t streamID,int blob_request,uint32_t snapshotStreamId)7364 int QCamera3HardwareInterface::setFrameParameters(
7365                     camera3_capture_request_t *request,
7366                     cam_stream_ID_t streamID,
7367                     int blob_request,
7368                     uint32_t snapshotStreamId)
7369 {
7370     /*translate from camera_metadata_t type to parm_type_t*/
7371     int rc = 0;
7372     int32_t hal_version = CAM_HAL_V3;
7373 
7374     clear_metadata_buffer(mParameters);
7375     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
7376         ALOGE("%s: Failed to set hal version in the parameters", __func__);
7377         return BAD_VALUE;
7378     }
7379 
7380     /*we need to update the frame number in the parameters*/
7381     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
7382             request->frame_number)) {
7383         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7384         return BAD_VALUE;
7385     }
7386 
7387     /* Update stream id of all the requested buffers */
7388     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
7389         ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
7390         return BAD_VALUE;
7391     }
7392 
7393     if (mUpdateDebugLevel) {
7394         uint32_t dummyDebugLevel = 0;
7395         /* The value of dummyDebugLevel is irrelavent. On
7396          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
7397         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
7398                 dummyDebugLevel)) {
7399             ALOGE("%s: Failed to set UPDATE_DEBUG_LEVEL", __func__);
7400             return BAD_VALUE;
7401         }
7402         mUpdateDebugLevel = false;
7403     }
7404 
7405     if(request->settings != NULL){
7406         rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
7407         if (blob_request)
7408             memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
7409     }
7410 
7411     return rc;
7412 }
7413 
7414 /*===========================================================================
7415  * FUNCTION   : setReprocParameters
7416  *
7417  * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
7418  *              return it.
7419  *
 * PARAMETERS :
 *   @request          : request that needs to be serviced
 *   @reprocParam      : HAL metadata buffer to be filled with the translated
 *                       reprocess parameters
 *   @snapshotStreamId : stream id of the snapshot stream
 *
 * RETURN     : success: NO_ERROR
 *              failure: error code (e.g. BAD_VALUE)
7425  *==========================================================================*/
setReprocParameters(camera3_capture_request_t * request,metadata_buffer_t * reprocParam,uint32_t snapshotStreamId)7426 int32_t QCamera3HardwareInterface::setReprocParameters(
7427         camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
7428         uint32_t snapshotStreamId)
7429 {
7430     /*translate from camera_metadata_t type to parm_type_t*/
7431     int rc = 0;
7432 
7433     if (NULL == request->settings){
7434         ALOGE("%s: Reprocess settings cannot be NULL", __func__);
7435         return BAD_VALUE;
7436     }
7437 
7438     if (NULL == reprocParam) {
7439         ALOGE("%s: Invalid reprocessing metadata buffer", __func__);
7440         return BAD_VALUE;
7441     }
7442     clear_metadata_buffer(reprocParam);
7443 
7444     /*we need to update the frame number in the parameters*/
7445     if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
7446             request->frame_number)) {
7447         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7448         return BAD_VALUE;
7449     }
7450 
7451     rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
7452     if (rc < 0) {
7453         ALOGE("%s: Failed to translate reproc request", __func__);
7454         return rc;
7455     }
7456 
7457     CameraMetadata frame_settings;
7458     frame_settings = request->settings;
7459     if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
7460             frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
7461         int32_t *crop_count =
7462                 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
7463         int32_t *crop_data =
7464                 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
7465         int32_t *roi_map =
7466                 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
7467         if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
7468             cam_crop_data_t crop_meta;
7469             memset(&crop_meta, 0, sizeof(cam_crop_data_t));
7470             crop_meta.num_of_streams = 1;
7471             crop_meta.crop_info[0].crop.left   = crop_data[0];
7472             crop_meta.crop_info[0].crop.top    = crop_data[1];
7473             crop_meta.crop_info[0].crop.width  = crop_data[2];
7474             crop_meta.crop_info[0].crop.height = crop_data[3];
7475 
7476             crop_meta.crop_info[0].roi_map.left =
7477                     roi_map[0];
7478             crop_meta.crop_info[0].roi_map.top =
7479                     roi_map[1];
7480             crop_meta.crop_info[0].roi_map.width =
7481                     roi_map[2];
7482             crop_meta.crop_info[0].roi_map.height =
7483                     roi_map[3];
7484 
7485             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
7486                 rc = BAD_VALUE;
7487             }
7488             CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d",
7489                     __func__,
7490                     request->input_buffer->stream,
7491                     crop_meta.crop_info[0].crop.left,
7492                     crop_meta.crop_info[0].crop.top,
7493                     crop_meta.crop_info[0].crop.width,
7494                     crop_meta.crop_info[0].crop.height);
7495             CDBG("%s: Found reprocess roi map data for stream %p %dx%d, %dx%d",
7496                     __func__,
7497                     request->input_buffer->stream,
7498                     crop_meta.crop_info[0].roi_map.left,
7499                     crop_meta.crop_info[0].roi_map.top,
7500                     crop_meta.crop_info[0].roi_map.width,
7501                     crop_meta.crop_info[0].roi_map.height);
7502             } else {
7503                 ALOGE("%s: Invalid reprocess crop count %d!", __func__, *crop_count);
7504             }
7505     } else {
7506         ALOGE("%s: No crop data from matching output stream", __func__);
7507     }
7508 
7509     /* These settings are not needed for regular requests so handle them specially for
7510        reprocess requests; information needed for EXIF tags */
7511     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
7512         int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
7513                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7514         if (NAME_NOT_FOUND != val) {
7515             uint32_t flashMode = (uint32_t)val;
7516             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
7517                 rc = BAD_VALUE;
7518             }
7519         } else {
7520             ALOGE("%s: Could not map fwk flash mode %d to correct hal flash mode", __func__,
7521                     frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7522         }
7523     } else {
7524         CDBG_HIGH("%s: No flash mode in reprocess settings", __func__);
7525     }
7526 
7527     if (frame_settings.exists(ANDROID_FLASH_STATE)) {
7528         int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
7529         if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
7530             rc = BAD_VALUE;
7531         }
7532     } else {
7533         CDBG_HIGH("%s: No flash state in reprocess settings", __func__);
7534     }
7535 
7536     return rc;
7537 }
7538 
7539 /*===========================================================================
7540  * FUNCTION   : saveRequestSettings
7541  *
7542  * DESCRIPTION: Add any settings that might have changed to the request settings
7543  *              and save the settings to be applied on the frame
7544  *
7545  * PARAMETERS :
7546  *   @jpegMetadata : the extracted and/or modified jpeg metadata
7547  *   @request      : request with initial settings
7548  *
7549  * RETURN     :
7550  * camera_metadata_t* : pointer to the saved request settings
7551  *==========================================================================*/
saveRequestSettings(const CameraMetadata & jpegMetadata,camera3_capture_request_t * request)7552 camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
7553         const CameraMetadata &jpegMetadata,
7554         camera3_capture_request_t *request)
7555 {
7556     camera_metadata_t *resultMetadata;
7557     CameraMetadata camMetadata;
7558     camMetadata = request->settings;
7559 
7560     if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7561         int32_t thumbnail_size[2];
7562         thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7563         thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7564         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
7565                 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7566     }
7567 
7568     resultMetadata = camMetadata.release();
7569     return resultMetadata;
7570 }
7571 
7572 /*===========================================================================
7573  * FUNCTION   : setHalFpsRange
7574  *
7575  * DESCRIPTION: set FPS range parameter
7576  *
7577  *
 * PARAMETERS :
 *   @settings    : Metadata from framework containing
 *                  ANDROID_CONTROL_AE_TARGET_FPS_RANGE
 *   @hal_metadata: HAL metadata buffer to receive the FPS/HFR parameters
 *
 * RETURN     : success: NO_ERROR
 *              failure: error code (e.g. BAD_VALUE)
7585  *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): no exists() check here — assumes the caller only invokes
    // this after verifying ANDROID_CONTROL_AE_TARGET_FPS_RANGE is present.
    // TODO confirm against call sites.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Video fps defaults to the AE target range; overridden below in HFR mode.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    CDBG("%s: aeTargetFpsRange fps: [%f %f]", __func__,
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Batching is disabled by default; only the constrained high-speed path
    // below may enable it.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // In HFR mode, force both sensor and video min fps up to the max so
        // the sensor streams at a single fixed high rate (see table above).
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the requested max fps to a discrete HFR mode enum.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch size = sensor fps / preview fps, clamped to the HW max.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            CDBG("%s: hfrMode: %d batchSize: %d", __func__, hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Always push the (possibly adjusted) fps range to the HAL batch.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    CDBG("%s: fps: [%f %f] vid_fps: [%f %f]", __func__, fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
7679 
7680 /*===========================================================================
7681  * FUNCTION   : translateToHalMetadata
7682  *
7683  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
7684  *
7685  *
 * PARAMETERS :
 *   @request          : request sent from framework
 *   @hal_metadata     : HAL metadata buffer to be populated
 *   @snapshotStreamId : stream id of the snapshot stream
 *
 * RETURN     : success: NO_ERROR
 *              failure: error code (e.g. BAD_VALUE)
7692  *==========================================================================*/
translateToHalMetadata(const camera3_capture_request_t * request,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId)7693 int QCamera3HardwareInterface::translateToHalMetadata
7694                                   (const camera3_capture_request_t *request,
7695                                    metadata_buffer_t *hal_metadata,
7696                                    uint32_t snapshotStreamId)
7697 {
7698     int rc = 0;
7699     CameraMetadata frame_settings;
7700     frame_settings = request->settings;
7701 
7702     /* Do not change the order of the following list unless you know what you are
7703      * doing.
7704      * The order is laid out in such a way that parameters in the front of the table
7705      * may be used to override the parameters later in the table. Examples are:
7706      * 1. META_MODE should precede AEC/AWB/AF MODE
7707      * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
7708      * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
7709      * 4. Any mode should precede it's corresponding settings
7710      */
7711     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
7712         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
7713         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
7714             rc = BAD_VALUE;
7715         }
7716         rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
7717         if (rc != NO_ERROR) {
7718             ALOGE("%s: extractSceneMode failed", __func__);
7719         }
7720     }
7721 
7722     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
7723         uint8_t fwk_aeMode =
7724             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
7725         uint8_t aeMode;
7726         int32_t redeye;
7727 
7728         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
7729             aeMode = CAM_AE_MODE_OFF;
7730         } else {
7731             aeMode = CAM_AE_MODE_ON;
7732         }
7733         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
7734             redeye = 1;
7735         } else {
7736             redeye = 0;
7737         }
7738 
7739         int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7740                 fwk_aeMode);
7741         if (NAME_NOT_FOUND != val) {
7742             int32_t flashMode = (int32_t)val;
7743             ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
7744         }
7745 
7746         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
7747         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
7748             rc = BAD_VALUE;
7749         }
7750     }
7751 
7752     if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
7753         uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
7754         int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7755                 fwk_whiteLevel);
7756         if (NAME_NOT_FOUND != val) {
7757             uint8_t whiteLevel = (uint8_t)val;
7758             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
7759                 rc = BAD_VALUE;
7760             }
7761         }
7762     }
7763 
7764     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
7765         uint8_t fwk_cacMode =
7766                 frame_settings.find(
7767                         ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
7768         int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7769                 fwk_cacMode);
7770         if (NAME_NOT_FOUND != val) {
7771             cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
7772             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
7773                 rc = BAD_VALUE;
7774             }
7775         } else {
7776             ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
7777         }
7778     }
7779 
7780     if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
7781         uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
7782         int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7783                 fwk_focusMode);
7784         if (NAME_NOT_FOUND != val) {
7785             uint8_t focusMode = (uint8_t)val;
7786             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
7787                 rc = BAD_VALUE;
7788             }
7789         }
7790     }
7791 
7792     if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
7793         float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
7794         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
7795                 focalDistance)) {
7796             rc = BAD_VALUE;
7797         }
7798     }
7799 
7800     if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
7801         uint8_t fwk_antibandingMode =
7802                 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
7803         int val = lookupHalName(ANTIBANDING_MODES_MAP,
7804                 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
7805         if (NAME_NOT_FOUND != val) {
7806             uint32_t hal_antibandingMode = (uint32_t)val;
7807             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
7808                     hal_antibandingMode)) {
7809                 rc = BAD_VALUE;
7810             }
7811         }
7812     }
7813 
7814     if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
7815         int32_t expCompensation = frame_settings.find(
7816                 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
7817         if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
7818             expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
7819         if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
7820             expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
7821         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
7822                 expCompensation)) {
7823             rc = BAD_VALUE;
7824         }
7825     }
7826 
7827     if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
7828         uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
7829         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
7830             rc = BAD_VALUE;
7831         }
7832     }
7833     if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
7834         rc = setHalFpsRange(frame_settings, hal_metadata);
7835         if (rc != NO_ERROR) {
7836             ALOGE("%s: setHalFpsRange failed", __func__);
7837         }
7838     }
7839 
7840     if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
7841         uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
7842         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
7843             rc = BAD_VALUE;
7844         }
7845     }
7846 
7847     if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
7848         uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
7849         int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7850                 fwk_effectMode);
7851         if (NAME_NOT_FOUND != val) {
7852             uint8_t effectMode = (uint8_t)val;
7853             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
7854                 rc = BAD_VALUE;
7855             }
7856         }
7857     }
7858 
7859     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
7860         uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
7861         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
7862                 colorCorrectMode)) {
7863             rc = BAD_VALUE;
7864         }
7865     }
7866 
7867     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
7868         cam_color_correct_gains_t colorCorrectGains;
7869         for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
7870             colorCorrectGains.gains[i] =
7871                     frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
7872         }
7873         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
7874                 colorCorrectGains)) {
7875             rc = BAD_VALUE;
7876         }
7877     }
7878 
7879     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
7880         cam_color_correct_matrix_t colorCorrectTransform;
7881         cam_rational_type_t transform_elem;
7882         size_t num = 0;
7883         for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
7884            for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
7885               transform_elem.numerator =
7886                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
7887               transform_elem.denominator =
7888                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
7889               colorCorrectTransform.transform_matrix[i][j] = transform_elem;
7890               num++;
7891            }
7892         }
7893         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
7894                 colorCorrectTransform)) {
7895             rc = BAD_VALUE;
7896         }
7897     }
7898 
7899     cam_trigger_t aecTrigger;
7900     aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
7901     aecTrigger.trigger_id = -1;
7902     if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
7903         frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
7904         aecTrigger.trigger =
7905             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
7906         aecTrigger.trigger_id =
7907             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
7908         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
7909                 aecTrigger)) {
7910             rc = BAD_VALUE;
7911         }
7912         CDBG("%s: precaptureTrigger: %d precaptureTriggerID: %d", __func__,
7913                 aecTrigger.trigger, aecTrigger.trigger_id);
7914     }
7915 
7916     /*af_trigger must come with a trigger id*/
7917     if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
7918         frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
7919         cam_trigger_t af_trigger;
7920         af_trigger.trigger =
7921             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
7922         af_trigger.trigger_id =
7923             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
7924         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
7925             rc = BAD_VALUE;
7926         }
7927         CDBG("%s: AfTrigger: %d AfTriggerID: %d", __func__,
7928                 af_trigger.trigger, af_trigger.trigger_id);
7929     }
7930 
7931     if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
7932         int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
7933         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
7934             rc = BAD_VALUE;
7935         }
7936     }
7937     if (frame_settings.exists(ANDROID_EDGE_MODE)) {
7938         cam_edge_application_t edge_application;
7939         edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
7940         if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
7941             edge_application.sharpness = 0;
7942         } else {
7943             edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
7944         }
7945         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
7946             rc = BAD_VALUE;
7947         }
7948     }
7949 
7950     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
7951         int32_t respectFlashMode = 1;
7952         if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
7953             uint8_t fwk_aeMode =
7954                 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
7955             if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
7956                 respectFlashMode = 0;
7957                 CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
7958                     __func__);
7959             }
7960         }
7961         if (respectFlashMode) {
7962             int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
7963                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7964             CDBG_HIGH("%s: flash mode after mapping %d", __func__, val);
7965             // To check: CAM_INTF_META_FLASH_MODE usage
7966             if (NAME_NOT_FOUND != val) {
7967                 uint8_t flashMode = (uint8_t)val;
7968                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
7969                     rc = BAD_VALUE;
7970                 }
7971             }
7972         }
7973     }
7974 
7975     if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
7976         uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
7977         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
7978             rc = BAD_VALUE;
7979         }
7980     }
7981 
7982     if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
7983         int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
7984         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
7985                 flashFiringTime)) {
7986             rc = BAD_VALUE;
7987         }
7988     }
7989 
7990     if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
7991         uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
7992         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
7993                 hotPixelMode)) {
7994             rc = BAD_VALUE;
7995         }
7996     }
7997 
7998     if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
7999         float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
8000         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
8001                 lensAperture)) {
8002             rc = BAD_VALUE;
8003         }
8004     }
8005 
8006     if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
8007         float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
8008         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
8009                 filterDensity)) {
8010             rc = BAD_VALUE;
8011         }
8012     }
8013 
8014     if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
8015         float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
8016         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
8017                 focalLength)) {
8018             rc = BAD_VALUE;
8019         }
8020     }
8021 
8022     if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
8023         uint8_t optStabMode =
8024                 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
8025         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
8026                 optStabMode)) {
8027             rc = BAD_VALUE;
8028         }
8029     }
8030 
8031     if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
8032         uint8_t videoStabMode =
8033                 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
8034         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
8035                 videoStabMode)) {
8036             rc = BAD_VALUE;
8037         }
8038     }
8039 
8040 
8041     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
8042         uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
8043         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
8044                 noiseRedMode)) {
8045             rc = BAD_VALUE;
8046         }
8047     }
8048 
8049     if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
8050         float reprocessEffectiveExposureFactor =
8051             frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
8052         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
8053                 reprocessEffectiveExposureFactor)) {
8054             rc = BAD_VALUE;
8055         }
8056     }
8057 
8058     cam_crop_region_t scalerCropRegion;
8059     bool scalerCropSet = false;
8060     if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
8061         scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
8062         scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
8063         scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
8064         scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
8065 
8066         // Map coordinate system from active array to sensor output.
8067         mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
8068                 scalerCropRegion.width, scalerCropRegion.height);
8069 
8070         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
8071                 scalerCropRegion)) {
8072             rc = BAD_VALUE;
8073         }
8074         scalerCropSet = true;
8075     }
8076 
8077     if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
8078         int64_t sensorExpTime =
8079                 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
8080         CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
8081         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
8082                 sensorExpTime)) {
8083             rc = BAD_VALUE;
8084         }
8085     }
8086 
8087     if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
8088         int64_t sensorFrameDuration =
8089                 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
8090         int64_t minFrameDuration = getMinFrameDuration(request);
8091         sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
8092         if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
8093             sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
8094         CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
8095         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
8096                 sensorFrameDuration)) {
8097             rc = BAD_VALUE;
8098         }
8099     }
8100 
8101     if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
8102         int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
8103         if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
8104                 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
8105         if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
8106                 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
8107         CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
8108         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
8109                 sensorSensitivity)) {
8110             rc = BAD_VALUE;
8111         }
8112     }
8113 
8114     if (frame_settings.exists(ANDROID_SHADING_MODE)) {
8115         uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
8116         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
8117             rc = BAD_VALUE;
8118         }
8119     }
8120 
8121     if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
8122         uint8_t fwk_facedetectMode =
8123                 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
8124 
8125         int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
8126                 fwk_facedetectMode);
8127 
8128         if (NAME_NOT_FOUND != val) {
8129             uint8_t facedetectMode = (uint8_t)val;
8130             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
8131                     facedetectMode)) {
8132                 rc = BAD_VALUE;
8133             }
8134         }
8135     }
8136 
8137     if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
8138         uint8_t histogramMode =
8139                 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
8140         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
8141                 histogramMode)) {
8142             rc = BAD_VALUE;
8143         }
8144     }
8145 
8146     if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
8147         uint8_t sharpnessMapMode =
8148                 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
8149         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
8150                 sharpnessMapMode)) {
8151             rc = BAD_VALUE;
8152         }
8153     }
8154 
8155     if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
8156         uint8_t tonemapMode =
8157                 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
8158         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
8159             rc = BAD_VALUE;
8160         }
8161     }
8162     /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
8163     /*All tonemap channels will have the same number of points*/
8164     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
8165         frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
8166         frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
8167         cam_rgb_tonemap_curves tonemapCurves;
8168         tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
8169         if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
8170             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
8171                     __func__, tonemapCurves.tonemap_points_cnt,
8172                     CAM_MAX_TONEMAP_CURVE_SIZE);
8173             tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
8174         }
8175 
8176         /* ch0 = G*/
8177         size_t point = 0;
8178         cam_tonemap_curve_t tonemapCurveGreen;
8179         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8180             for (size_t j = 0; j < 2; j++) {
8181                tonemapCurveGreen.tonemap_points[i][j] =
8182                   frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
8183                point++;
8184             }
8185         }
8186         tonemapCurves.curves[0] = tonemapCurveGreen;
8187 
8188         /* ch 1 = B */
8189         point = 0;
8190         cam_tonemap_curve_t tonemapCurveBlue;
8191         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8192             for (size_t j = 0; j < 2; j++) {
8193                tonemapCurveBlue.tonemap_points[i][j] =
8194                   frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
8195                point++;
8196             }
8197         }
8198         tonemapCurves.curves[1] = tonemapCurveBlue;
8199 
8200         /* ch 2 = R */
8201         point = 0;
8202         cam_tonemap_curve_t tonemapCurveRed;
8203         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8204             for (size_t j = 0; j < 2; j++) {
8205                tonemapCurveRed.tonemap_points[i][j] =
8206                   frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
8207                point++;
8208             }
8209         }
8210         tonemapCurves.curves[2] = tonemapCurveRed;
8211 
8212         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
8213                 tonemapCurves)) {
8214             rc = BAD_VALUE;
8215         }
8216     }
8217 
8218     if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
8219         uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
8220         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
8221                 captureIntent)) {
8222             rc = BAD_VALUE;
8223         }
8224     }
8225 
8226     if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
8227         uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
8228         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
8229                 blackLevelLock)) {
8230             rc = BAD_VALUE;
8231         }
8232     }
8233 
8234     if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
8235         uint8_t lensShadingMapMode =
8236                 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
8237         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
8238                 lensShadingMapMode)) {
8239             rc = BAD_VALUE;
8240         }
8241     }
8242 
8243     if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
8244         cam_area_t roi;
8245         bool reset = true;
8246         convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
8247 
8248         // Map coordinate system from active array to sensor output.
8249         mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8250                 roi.rect.height);
8251 
8252         if (scalerCropSet) {
8253             reset = resetIfNeededROI(&roi, &scalerCropRegion);
8254         }
8255         if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
8256             rc = BAD_VALUE;
8257         }
8258     }
8259 
8260     if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
8261         cam_area_t roi;
8262         bool reset = true;
8263         convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
8264 
8265         // Map coordinate system from active array to sensor output.
8266         mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8267                 roi.rect.height);
8268 
8269         if (scalerCropSet) {
8270             reset = resetIfNeededROI(&roi, &scalerCropRegion);
8271         }
8272         if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
8273             rc = BAD_VALUE;
8274         }
8275     }
8276 
8277     if (m_bIs4KVideo) {
8278         /* Override needed for Video template in case of 4K video */
8279         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8280                 CAM_INTF_PARM_CDS_MODE, m_CdsPreference)) {
8281             rc = BAD_VALUE;
8282         }
8283     } else if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
8284             frame_settings.exists(QCAMERA3_CDS_MODE)) {
8285         int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
8286         if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
8287             ALOGE("%s: Invalid CDS mode %d!", __func__, *fwk_cds);
8288         } else {
8289             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8290                     CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
8291                 rc = BAD_VALUE;
8292             }
8293         }
8294     }
8295 
8296     // TNR
8297     if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
8298         frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
8299         uint8_t b_TnrRequested = 0;
8300         cam_denoise_param_t tnr;
8301         tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
8302         tnr.process_plates =
8303             (cam_denoise_process_type_t)frame_settings.find(
8304             QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
8305         b_TnrRequested = tnr.denoise_enable;
8306         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
8307             rc = BAD_VALUE;
8308         }
8309     }
8310 
8311     if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
8312         int32_t fwk_testPatternMode =
8313                 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
8314         int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
8315                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
8316 
8317         if (NAME_NOT_FOUND != testPatternMode) {
8318             cam_test_pattern_data_t testPatternData;
8319             memset(&testPatternData, 0, sizeof(testPatternData));
8320             testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
8321             if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
8322                     frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
8323                 int32_t *fwk_testPatternData =
8324                         frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
8325                 testPatternData.r = fwk_testPatternData[0];
8326                 testPatternData.b = fwk_testPatternData[3];
8327                 switch (gCamCapability[mCameraId]->color_arrangement) {
8328                     case CAM_FILTER_ARRANGEMENT_RGGB:
8329                     case CAM_FILTER_ARRANGEMENT_GRBG:
8330                         testPatternData.gr = fwk_testPatternData[1];
8331                         testPatternData.gb = fwk_testPatternData[2];
8332                         break;
8333                     case CAM_FILTER_ARRANGEMENT_GBRG:
8334                     case CAM_FILTER_ARRANGEMENT_BGGR:
8335                         testPatternData.gr = fwk_testPatternData[2];
8336                         testPatternData.gb = fwk_testPatternData[1];
8337                         break;
8338                     default:
8339                         ALOGE("%s: color arrangement %d is not supported", __func__,
8340                                 gCamCapability[mCameraId]->color_arrangement);
8341                         break;
8342                 }
8343             }
8344             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
8345                     testPatternData)) {
8346                 rc = BAD_VALUE;
8347             }
8348         } else {
8349             ALOGE("%s: Invalid framework sensor test pattern mode %d", __func__,
8350                     fwk_testPatternMode);
8351         }
8352     }
8353 
8354     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
8355         size_t count = 0;
8356         camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
8357         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
8358                 gps_coords.data.d, gps_coords.count, count);
8359         if (gps_coords.count != count) {
8360             rc = BAD_VALUE;
8361         }
8362     }
8363 
8364     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
8365         char gps_methods[GPS_PROCESSING_METHOD_SIZE];
8366         size_t count = 0;
8367         const char *gps_methods_src = (const char *)
8368                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
8369         memset(gps_methods, '\0', sizeof(gps_methods));
8370         strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
8371         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
8372                 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
8373         if (GPS_PROCESSING_METHOD_SIZE != count) {
8374             rc = BAD_VALUE;
8375         }
8376     }
8377 
8378     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
8379         int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
8380         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
8381                 gps_timestamp)) {
8382             rc = BAD_VALUE;
8383         }
8384     }
8385 
8386     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8387         int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
8388         cam_rotation_info_t rotation_info;
8389         if (orientation == 0) {
8390            rotation_info.rotation = ROTATE_0;
8391         } else if (orientation == 90) {
8392            rotation_info.rotation = ROTATE_90;
8393         } else if (orientation == 180) {
8394            rotation_info.rotation = ROTATE_180;
8395         } else if (orientation == 270) {
8396            rotation_info.rotation = ROTATE_270;
8397         }
8398         rotation_info.streamId = snapshotStreamId;
8399         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
8400         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
8401             rc = BAD_VALUE;
8402         }
8403     }
8404 
8405     if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
8406         uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
8407         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
8408             rc = BAD_VALUE;
8409         }
8410     }
8411 
8412     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
8413         uint32_t thumb_quality = (uint32_t)
8414                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
8415         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
8416                 thumb_quality)) {
8417             rc = BAD_VALUE;
8418         }
8419     }
8420 
8421     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8422         cam_dimension_t dim;
8423         dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8424         dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8425         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
8426             rc = BAD_VALUE;
8427         }
8428     }
8429 
8430     // Internal metadata
8431     if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
8432         size_t count = 0;
8433         camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
8434         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
8435                 privatedata.data.i32, privatedata.count, count);
8436         if (privatedata.count != count) {
8437             rc = BAD_VALUE;
8438         }
8439     }
8440 
8441     if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
8442         uint8_t* use_av_timer =
8443                 frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
8444         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
8445             rc = BAD_VALUE;
8446         }
8447     }
8448 
8449     // EV step
8450     if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
8451             gCamCapability[mCameraId]->exp_compensation_step)) {
8452         rc = BAD_VALUE;
8453     }
8454 
8455     // CDS info
8456     if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
8457         cam_cds_data_t *cdsData = (cam_cds_data_t *)
8458                 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
8459 
8460         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8461                 CAM_INTF_META_CDS_DATA, *cdsData)) {
8462             rc = BAD_VALUE;
8463         }
8464     }
8465 
8466     return rc;
8467 }
8468 
8469 /*===========================================================================
8470  * FUNCTION   : captureResultCb
8471  *
8472  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
8473  *
8474  * PARAMETERS :
8475  *   @frame  : frame information from mm-camera-interface
8476  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
8477  *   @userdata: userdata
8478  *
8479  * RETURN     : NONE
8480  *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer,void * userdata)8481 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
8482                 camera3_stream_buffer_t *buffer,
8483                 uint32_t frame_number, bool isInputBuffer, void *userdata)
8484 {
8485     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
8486     if (hw == NULL) {
8487         ALOGE("%s: Invalid hw %p", __func__, hw);
8488         return;
8489     }
8490 
8491     hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
8492     return;
8493 }
8494 
8495 
8496 /*===========================================================================
8497  * FUNCTION   : initialize
8498  *
8499  * DESCRIPTION: Pass framework callback pointers to HAL
8500  *
8501  * PARAMETERS :
8502  *
8503  *
8504  * RETURN     : Success : 0
8505  *              Failure: -ENODEV
8506  *==========================================================================*/
8507 
initialize(const struct camera3_device * device,const camera3_callback_ops_t * callback_ops)8508 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
8509                                   const camera3_callback_ops_t *callback_ops)
8510 {
8511     CDBG("%s: E", __func__);
8512     QCamera3HardwareInterface *hw =
8513         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8514     if (!hw) {
8515         ALOGE("%s: NULL camera device", __func__);
8516         return -ENODEV;
8517     }
8518 
8519     int rc = hw->initialize(callback_ops);
8520     CDBG("%s: X", __func__);
8521     return rc;
8522 }
8523 
8524 /*===========================================================================
8525  * FUNCTION   : configure_streams
8526  *
8527  * DESCRIPTION:
8528  *
8529  * PARAMETERS :
8530  *
8531  *
8532  * RETURN     : Success: 0
8533  *              Failure: -EINVAL (if stream configuration is invalid)
8534  *                       -ENODEV (fatal error)
8535  *==========================================================================*/
8536 
configure_streams(const struct camera3_device * device,camera3_stream_configuration_t * stream_list)8537 int QCamera3HardwareInterface::configure_streams(
8538         const struct camera3_device *device,
8539         camera3_stream_configuration_t *stream_list)
8540 {
8541     CDBG("%s: E", __func__);
8542     QCamera3HardwareInterface *hw =
8543         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8544     if (!hw) {
8545         ALOGE("%s: NULL camera device", __func__);
8546         return -ENODEV;
8547     }
8548     int rc = hw->configureStreams(stream_list);
8549     CDBG("%s: X", __func__);
8550     return rc;
8551 }
8552 
8553 /*===========================================================================
8554  * FUNCTION   : construct_default_request_settings
8555  *
8556  * DESCRIPTION: Configure a settings buffer to meet the required use case
8557  *
8558  * PARAMETERS :
8559  *
8560  *
8561  * RETURN     : Success: Return valid metadata
8562  *              Failure: Return NULL
8563  *==========================================================================*/
8564 const camera_metadata_t* QCamera3HardwareInterface::
construct_default_request_settings(const struct camera3_device * device,int type)8565     construct_default_request_settings(const struct camera3_device *device,
8566                                         int type)
8567 {
8568 
8569     CDBG("%s: E", __func__);
8570     camera_metadata_t* fwk_metadata = NULL;
8571     QCamera3HardwareInterface *hw =
8572         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8573     if (!hw) {
8574         ALOGE("%s: NULL camera device", __func__);
8575         return NULL;
8576     }
8577 
8578     fwk_metadata = hw->translateCapabilityToMetadata(type);
8579 
8580     CDBG("%s: X", __func__);
8581     return fwk_metadata;
8582 }
8583 
8584 /*===========================================================================
8585  * FUNCTION   : process_capture_request
8586  *
8587  * DESCRIPTION:
8588  *
8589  * PARAMETERS :
8590  *
8591  *
8592  * RETURN     :
8593  *==========================================================================*/
process_capture_request(const struct camera3_device * device,camera3_capture_request_t * request)8594 int QCamera3HardwareInterface::process_capture_request(
8595                     const struct camera3_device *device,
8596                     camera3_capture_request_t *request)
8597 {
8598     CDBG("%s: E", __func__);
8599     QCamera3HardwareInterface *hw =
8600         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8601     if (!hw) {
8602         ALOGE("%s: NULL camera device", __func__);
8603         return -EINVAL;
8604     }
8605 
8606     int rc = hw->processCaptureRequest(request);
8607     CDBG("%s: X", __func__);
8608     return rc;
8609 }
8610 
8611 /*===========================================================================
8612  * FUNCTION   : dump
8613  *
8614  * DESCRIPTION:
8615  *
8616  * PARAMETERS :
8617  *
8618  *
8619  * RETURN     :
8620  *==========================================================================*/
8621 
dump(const struct camera3_device * device,int fd)8622 void QCamera3HardwareInterface::dump(
8623                 const struct camera3_device *device, int fd)
8624 {
8625     /* Log level property is read when "adb shell dumpsys media.camera" is
8626        called so that the log level can be controlled without restarting
8627        the media server */
8628     getLogLevel();
8629 
8630     CDBG("%s: E", __func__);
8631     QCamera3HardwareInterface *hw =
8632         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8633     if (!hw) {
8634         ALOGE("%s: NULL camera device", __func__);
8635         return;
8636     }
8637 
8638     hw->dump(fd);
8639     CDBG("%s: X", __func__);
8640     return;
8641 }
8642 
8643 /*===========================================================================
8644  * FUNCTION   : flush
8645  *
8646  * DESCRIPTION:
8647  *
8648  * PARAMETERS :
8649  *
8650  *
8651  * RETURN     :
8652  *==========================================================================*/
8653 
flush(const struct camera3_device * device)8654 int QCamera3HardwareInterface::flush(
8655                 const struct camera3_device *device)
8656 {
8657     int rc;
8658     CDBG("%s: E", __func__);
8659     QCamera3HardwareInterface *hw =
8660         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8661     if (!hw) {
8662         ALOGE("%s: NULL camera device", __func__);
8663         return -EINVAL;
8664     }
8665 
8666     rc = hw->flush();
8667     CDBG("%s: X", __func__);
8668     return rc;
8669 }
8670 
8671 /*===========================================================================
8672  * FUNCTION   : close_camera_device
8673  *
8674  * DESCRIPTION:
8675  *
8676  * PARAMETERS :
8677  *
8678  *
8679  * RETURN     :
8680  *==========================================================================*/
close_camera_device(struct hw_device_t * device)8681 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
8682 {
8683     CDBG("%s: E", __func__);
8684     int ret = NO_ERROR;
8685     QCamera3HardwareInterface *hw =
8686         reinterpret_cast<QCamera3HardwareInterface *>(
8687             reinterpret_cast<camera3_device_t *>(device)->priv);
8688     if (!hw) {
8689         ALOGE("NULL camera device");
8690         return BAD_VALUE;
8691     }
8692     delete hw;
8693 
8694     CDBG("%s: X", __func__);
8695     return ret;
8696 }
8697 
8698 /*===========================================================================
8699  * FUNCTION   : getWaveletDenoiseProcessPlate
8700  *
8701  * DESCRIPTION: query wavelet denoise process plate
8702  *
8703  * PARAMETERS : None
8704  *
8705  * RETURN     : WNR process plate value
8706  *==========================================================================*/
getWaveletDenoiseProcessPlate()8707 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
8708 {
8709     char prop[PROPERTY_VALUE_MAX];
8710     memset(prop, 0, sizeof(prop));
8711     property_get("persist.denoise.process.plates", prop, "0");
8712     int processPlate = atoi(prop);
8713     switch(processPlate) {
8714     case 0:
8715         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
8716     case 1:
8717         return CAM_WAVELET_DENOISE_CBCR_ONLY;
8718     case 2:
8719         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8720     case 3:
8721         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
8722     default:
8723         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8724     }
8725 }
8726 
8727 
8728 /*===========================================================================
8729  * FUNCTION   : getTemporalDenoiseProcessPlate
8730  *
8731  * DESCRIPTION: query temporal denoise process plate
8732  *
8733  * PARAMETERS : None
8734  *
8735  * RETURN     : TNR process plate value
8736  *==========================================================================*/
getTemporalDenoiseProcessPlate()8737 cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
8738 {
8739     char prop[PROPERTY_VALUE_MAX];
8740     memset(prop, 0, sizeof(prop));
8741     property_get("persist.tnr.process.plates", prop, "0");
8742     int processPlate = atoi(prop);
8743     switch(processPlate) {
8744     case 0:
8745         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
8746     case 1:
8747         return CAM_WAVELET_DENOISE_CBCR_ONLY;
8748     case 2:
8749         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8750     case 3:
8751         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
8752     default:
8753         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8754     }
8755 }
8756 
8757 
8758 /*===========================================================================
8759  * FUNCTION   : extractSceneMode
8760  *
8761  * DESCRIPTION: Extract scene mode from frameworks set metadata
8762  *
8763  * PARAMETERS :
8764  *      @frame_settings: CameraMetadata reference
8765  *      @metaMode: ANDROID_CONTROL_MODE
8766  *      @hal_metadata: hal metadata structure
8767  *
8768  * RETURN     : NO_ERROR on success, BAD_VALUE if updating the metadata batch fails
8769  *==========================================================================*/
extractSceneMode(const CameraMetadata & frame_settings,uint8_t metaMode,metadata_buffer_t * hal_metadata)8770 int32_t QCamera3HardwareInterface::extractSceneMode(
8771         const CameraMetadata &frame_settings, uint8_t metaMode,
8772         metadata_buffer_t *hal_metadata)
8773 {
8774     int32_t rc = NO_ERROR;
8775 
8776     if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
8777         camera_metadata_ro_entry entry =
8778                 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
8779         if (0 == entry.count)
8780             return rc;
8781 
8782         uint8_t fwk_sceneMode = entry.data.u8[0];
8783 
8784         int val = lookupHalName(SCENE_MODES_MAP,
8785                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
8786                 fwk_sceneMode);
8787         if (NAME_NOT_FOUND != val) {
8788             uint8_t sceneMode = (uint8_t)val;
8789             CDBG("%s: sceneMode: %d", __func__, sceneMode);
8790             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8791                     CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
8792                 rc = BAD_VALUE;
8793             }
8794         }
8795     } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
8796             (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
8797         uint8_t sceneMode = CAM_SCENE_MODE_OFF;
8798         CDBG("%s: sceneMode: %d", __func__, sceneMode);
8799         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8800                 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
8801             rc = BAD_VALUE;
8802         }
8803     }
8804     return rc;
8805 }
8806 
8807 /*===========================================================================
8808  * FUNCTION   : needRotationReprocess
8809  *
8810  * DESCRIPTION: if rotation needs to be done by reprocess in pp
8811  *
8812  * PARAMETERS : none
8813  *
8814  * RETURN     : true: needed
8815  *              false: no need
8816  *==========================================================================*/
needRotationReprocess()8817 bool QCamera3HardwareInterface::needRotationReprocess()
8818 {
8819     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
8820         // current rotation is not zero, and pp has the capability to process rotation
8821         CDBG_HIGH("%s: need do reprocess for rotation", __func__);
8822         return true;
8823     }
8824 
8825     return false;
8826 }
8827 
8828 /*===========================================================================
8829  * FUNCTION   : needReprocess
8830  *
8831  * DESCRIPTION: if reprocess in needed
8832  *
8833  * PARAMETERS : none
8834  *
8835  * RETURN     : true: needed
8836  *              false: no need
8837  *==========================================================================*/
needReprocess(uint32_t postprocess_mask)8838 bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
8839 {
8840     if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
8841         // TODO: add for ZSL HDR later
8842         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
8843         if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
8844             CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
8845             return true;
8846         } else {
8847             CDBG_HIGH("%s: already post processed frame", __func__);
8848             return false;
8849         }
8850     }
8851     return needRotationReprocess();
8852 }
8853 
8854 /*===========================================================================
8855  * FUNCTION   : needJpegRotation
8856  *
8857  * DESCRIPTION: if rotation from jpeg is needed
8858  *
8859  * PARAMETERS : none
8860  *
8861  * RETURN     : true: needed
8862  *              false: no need
8863  *==========================================================================*/
needJpegRotation()8864 bool QCamera3HardwareInterface::needJpegRotation()
8865 {
8866    /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
8867     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
8868        CDBG("%s: Need Jpeg to do the rotation", __func__);
8869        return true;
8870     }
8871     return false;
8872 }
8873 
8874 /*===========================================================================
8875  * FUNCTION   : addOfflineReprocChannel
8876  *
8877  * DESCRIPTION: add a reprocess channel that will do reprocess on frames
8878  *              coming from input channel
8879  *
8880  * PARAMETERS :
8881  *   @config  : reprocess configuration
8882  *   @inputChHandle : pointer to the input (source) channel
8883  *
8884  *
8885  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
8886  *==========================================================================*/
addOfflineReprocChannel(const reprocess_config_t & config,QCamera3ProcessingChannel * inputChHandle)8887 QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
8888         const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
8889 {
8890     int32_t rc = NO_ERROR;
8891     QCamera3ReprocessChannel *pChannel = NULL;
8892 
8893     pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
8894             mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
8895             CAM_QCOM_FEATURE_NONE, this, inputChHandle);
8896     if (NULL == pChannel) {
8897         ALOGE("%s: no mem for reprocess channel", __func__);
8898         return NULL;
8899     }
8900 
8901     rc = pChannel->initialize(IS_TYPE_NONE);
8902     if (rc != NO_ERROR) {
8903         ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
8904         delete pChannel;
8905         return NULL;
8906     }
8907 
8908     // pp feature config
8909     cam_pp_feature_config_t pp_config;
8910     memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
8911 
8912     pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
8913 
8914     rc = pChannel->addReprocStreamsFromSource(pp_config,
8915             config,
8916             IS_TYPE_NONE,
8917             mMetadataChannel);
8918 
8919     if (rc != NO_ERROR) {
8920         delete pChannel;
8921         return NULL;
8922     }
8923     return pChannel;
8924 }
8925 
/*===========================================================================
 * FUNCTION   : getMobicatMask
 *
 * DESCRIPTION: returns mobicat mask
 *
 * PARAMETERS : none
 *
 * RETURN     : mobicat mask
 *
 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // m_MobicatMask is populated by setMobicat() from the
    // persist.camera.mobicat property; non-zero means mobicat is enabled.
    return m_MobicatMask;
}
8940 
8941 /*===========================================================================
8942  * FUNCTION   : setMobicat
8943  *
8944  * DESCRIPTION: set Mobicat on/off.
8945  *
8946  * PARAMETERS :
8947  *   @params  : none
8948  *
8949  * RETURN     : int32_t type of status
8950  *              NO_ERROR  -- success
8951  *              none-zero failure code
8952  *==========================================================================*/
setMobicat()8953 int32_t QCamera3HardwareInterface::setMobicat()
8954 {
8955     char value [PROPERTY_VALUE_MAX];
8956     property_get("persist.camera.mobicat", value, "0");
8957     int32_t ret = NO_ERROR;
8958     uint8_t enableMobi = (uint8_t)atoi(value);
8959 
8960     if (enableMobi) {
8961         tune_cmd_t tune_cmd;
8962         tune_cmd.type = SET_RELOAD_CHROMATIX;
8963         tune_cmd.module = MODULE_ALL;
8964         tune_cmd.value = TRUE;
8965         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
8966                 CAM_INTF_PARM_SET_VFE_COMMAND,
8967                 tune_cmd);
8968 
8969         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
8970                 CAM_INTF_PARM_SET_PP_COMMAND,
8971                 tune_cmd);
8972     }
8973     m_MobicatMask = enableMobi;
8974 
8975     return ret;
8976 }
8977 
8978 /*===========================================================================
8979 * FUNCTION   : getLogLevel
8980 *
8981 * DESCRIPTION: Reads the log level property into a variable
8982 *
8983 * PARAMETERS :
8984 *   None
8985 *
8986 * RETURN     :
8987 *   None
8988 *==========================================================================*/
getLogLevel()8989 void QCamera3HardwareInterface::getLogLevel()
8990 {
8991     char prop[PROPERTY_VALUE_MAX];
8992     uint32_t globalLogLevel = 0;
8993 
8994     property_get("persist.camera.hal.debug", prop, "0");
8995     int val = atoi(prop);
8996     if (0 <= val) {
8997         gCamHal3LogLevel = (uint32_t)val;
8998     }
8999     property_get("persist.camera.global.debug", prop, "0");
9000     val = atoi(prop);
9001     if (0 <= val) {
9002         globalLogLevel = (uint32_t)val;
9003     }
9004 
9005     /* Highest log level among hal.logs and global.logs is selected */
9006     if (gCamHal3LogLevel < globalLogLevel)
9007         gCamHal3LogLevel = globalLogLevel;
9008 
9009     return;
9010 }
9011 
9012 /*===========================================================================
9013  * FUNCTION   : validateStreamRotations
9014  *
9015  * DESCRIPTION: Check if the rotations requested are supported
9016  *
9017  * PARAMETERS :
9018  *   @stream_list : streams to be configured
9019  *
9020  * RETURN     : NO_ERROR on success
9021  *              -EINVAL on failure
9022  *
9023  *==========================================================================*/
validateStreamRotations(camera3_stream_configuration_t * streamList)9024 int QCamera3HardwareInterface::validateStreamRotations(
9025         camera3_stream_configuration_t *streamList)
9026 {
9027     int rc = NO_ERROR;
9028 
9029     /*
9030     * Loop through all streams requested in configuration
9031     * Check if unsupported rotations have been requested on any of them
9032     */
9033     for (size_t j = 0; j < streamList->num_streams; j++){
9034         camera3_stream_t *newStream = streamList->streams[j];
9035 
9036         bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
9037         bool isImplDef = (newStream->format ==
9038                 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
9039         bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
9040                 isImplDef);
9041 
9042         if (isRotated && (!isImplDef || isZsl)) {
9043             ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
9044                     "type:%d and stream format:%d", __func__,
9045                     newStream->rotation, newStream->stream_type,
9046                     newStream->format);
9047             rc = -EINVAL;
9048             break;
9049         }
9050     }
9051     return rc;
9052 }
9053 
9054 /*===========================================================================
9055 * FUNCTION   : getFlashInfo
9056 *
9057 * DESCRIPTION: Retrieve information about whether the device has a flash.
9058 *
9059 * PARAMETERS :
9060 *   @cameraId  : Camera id to query
9061 *   @hasFlash  : Boolean indicating whether there is a flash device
9062 *                associated with given camera
9063 *   @flashNode : If a flash device exists, this will be its device node.
9064 *
9065 * RETURN     :
9066 *   None
9067 *==========================================================================*/
getFlashInfo(const int cameraId,bool & hasFlash,char (& flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])9068 void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
9069         bool& hasFlash,
9070         char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
9071 {
9072     cam_capability_t* camCapability = gCamCapability[cameraId];
9073     if (NULL == camCapability) {
9074         hasFlash = false;
9075         flashNode[0] = '\0';
9076     } else {
9077         hasFlash = camCapability->flash_available;
9078         strlcpy(flashNode,
9079                 (char*)camCapability->flash_dev_name,
9080                 QCAMERA_MAX_FILEPATH_LENGTH);
9081     }
9082 }
9083 
/*===========================================================================
* FUNCTION   : getEepromVersionInfo
*
* DESCRIPTION: Retrieve version info of the sensor EEPROM data
*
* PARAMETERS : None
*
* RETURN     : string describing EEPROM version
*              "\0" if no such info available
*==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Returns a pointer into the static capability table entry for this
    // camera; the caller must not free or modify the returned string.
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
9098 
9099 /*===========================================================================
9100 * FUNCTION   : getLdafCalib
9101 *
9102 * DESCRIPTION: Retrieve Laser AF calibration data
9103 *
9104 * PARAMETERS : None
9105 *
9106 * RETURN     : Two uint32_t describing laser AF calibration data
9107 *              NULL if none is available.
9108 *==========================================================================*/
getLdafCalib()9109 const uint32_t *QCamera3HardwareInterface::getLdafCalib()
9110 {
9111     if (mLdafCalibExist) {
9112         return &mLdafCalib[0];
9113     } else {
9114         return NULL;
9115     }
9116 }
9117 
9118 /*===========================================================================
9119  * FUNCTION   : dynamicUpdateMetaStreamInfo
9120  *
9121  * DESCRIPTION: This function:
9122  *             (1) stops all the channels
9123  *             (2) returns error on pending requests and buffers
9124  *             (3) sends metastream_info in setparams
9125  *             (4) starts all channels
9126  *             This is useful when sensor has to be restarted to apply any
9127  *             settings such as frame rate from a different sensor mode
9128  *
9129  * PARAMETERS : None
9130  *
9131  * RETURN     : NO_ERROR on success
9132  *              Error codes on failure
9133  *
9134  *==========================================================================*/
dynamicUpdateMetaStreamInfo()9135 int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
9136 {
9137     ATRACE_CALL();
9138     int rc = NO_ERROR;
9139 
9140     CDBG("%s: E", __func__);
9141 
9142     rc = stopAllChannels();
9143     if (rc < 0) {
9144         ALOGE("%s: stopAllChannels failed", __func__);
9145         return rc;
9146     }
9147 
9148     rc = notifyErrorForPendingRequests();
9149     if (rc < 0) {
9150         ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
9151         return rc;
9152     }
9153 
9154     /* Send meta stream info once again so that ISP can start */
9155     ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9156             CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
9157     CDBG("%s: set_parms META_STREAM_INFO with new settings ", __func__ );
9158     rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
9159             mParameters);
9160     if (rc < 0) {
9161         ALOGE("%s: set Metastreaminfo failed. Sensor mode does not change",
9162                 __func__);
9163     }
9164 
9165     rc = startAllChannels();
9166     if (rc < 0) {
9167         ALOGE("%s: startAllChannels failed", __func__);
9168         return rc;
9169     }
9170 
9171     CDBG("%s:%d X", __func__, __LINE__);
9172     return rc;
9173 }
9174 
9175 /*===========================================================================
9176  * FUNCTION   : stopAllChannels
9177  *
9178  * DESCRIPTION: This function stops (equivalent to stream-off) all channels
9179  *
9180  * PARAMETERS : None
9181  *
9182  * RETURN     : NO_ERROR on success
9183  *              Error codes on failure
9184  *
9185  *==========================================================================*/
stopAllChannels()9186 int32_t QCamera3HardwareInterface::stopAllChannels()
9187 {
9188     int32_t rc = NO_ERROR;
9189 
9190     // Stop the Streams/Channels
9191     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9192         it != mStreamInfo.end(); it++) {
9193         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9194         channel->stop();
9195         (*it)->status = INVALID;
9196     }
9197 
9198     if (mSupportChannel) {
9199         mSupportChannel->stop();
9200     }
9201     if (mAnalysisChannel) {
9202         mAnalysisChannel->stop();
9203     }
9204     if (mRawDumpChannel) {
9205         mRawDumpChannel->stop();
9206     }
9207     if (mMetadataChannel) {
9208         /* If content of mStreamInfo is not 0, there is metadata stream */
9209         mMetadataChannel->stop();
9210     }
9211 
9212     CDBG("%s:%d All channels stopped", __func__, __LINE__);
9213     return rc;
9214 }
9215 
9216 /*===========================================================================
9217  * FUNCTION   : startAllChannels
9218  *
9219  * DESCRIPTION: This function starts (equivalent to stream-on) all channels
9220  *
9221  * PARAMETERS : None
9222  *
9223  * RETURN     : NO_ERROR on success
9224  *              Error codes on failure
9225  *
9226  *==========================================================================*/
startAllChannels()9227 int32_t QCamera3HardwareInterface::startAllChannels()
9228 {
9229     int32_t rc = NO_ERROR;
9230 
9231     CDBG("%s: Start all channels ", __func__);
9232     // Start the Streams/Channels
9233     if (mMetadataChannel) {
9234         /* If content of mStreamInfo is not 0, there is metadata stream */
9235         rc = mMetadataChannel->start();
9236         if (rc < 0) {
9237             ALOGE("%s: META channel start failed", __func__);
9238             return rc;
9239         }
9240     }
9241     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9242         it != mStreamInfo.end(); it++) {
9243         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9244         rc = channel->start();
9245         if (rc < 0) {
9246             ALOGE("%s: channel start failed", __func__);
9247             return rc;
9248         }
9249     }
9250     if (mAnalysisChannel) {
9251         mAnalysisChannel->start();
9252     }
9253     if (mSupportChannel) {
9254         rc = mSupportChannel->start();
9255         if (rc < 0) {
9256             ALOGE("%s: Support channel start failed", __func__);
9257             return rc;
9258         }
9259     }
9260     if (mRawDumpChannel) {
9261         rc = mRawDumpChannel->start();
9262         if (rc < 0) {
9263             ALOGE("%s: RAW dump channel start failed", __func__);
9264             return rc;
9265         }
9266     }
9267 
9268     CDBG("%s:%d All channels started", __func__, __LINE__);
9269     return rc;
9270 }
9271 
/*===========================================================================
 * FUNCTION   : notifyErrorForPendingRequests
 *
 * DESCRIPTION: This function sends error for all the pending requests/buffers
 *
 * PARAMETERS : None
 *
 * RETURN     : Error codes
 *              NO_ERROR on success
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;
    FlushMap flushMap;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // frameNum is the oldest pending request's frame number. Buffers with a
    // smaller frame number already had their result metadata sent, so they
    // get ERROR_BUFFER; buffers at/after it get ERROR_REQUEST below.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    CDBG_HIGH("%s: Oldest frame num on  mPendingRequestsList = %d",
      __func__, frameNum);

    // Go through the pending buffers and group them depending
    // on frame number
    for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end();) {

        if (k->frame_number < frameNum) {
            ssize_t idx = flushMap.indexOfKey(k->frame_number);
            if (idx == NAME_NOT_FOUND) {
                // First buffer seen for this frame number
                Vector<PendingBufferInfo> pending;
                pending.add(*k);
                flushMap.add(k->frame_number, pending);
            } else {
                Vector<PendingBufferInfo> &pending =
                        flushMap.editValueFor(k->frame_number);
                pending.add(*k);
            }

            // Buffer is being returned to the framework; drop it from the
            // pending accounting while iterating (erase returns next item)
            mPendingBuffersMap.num_buffers--;
            k = mPendingBuffersMap.mPendingBufferList.erase(k);
        } else {
            k++;
        }
    }

    // Phase 1: ERROR_BUFFER for frames whose metadata was already delivered
    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
        uint32_t frame_number = flushMap.keyAt(iFlush);
        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);

        // Send Error notify to frameworks for each buffer for which
        // metadata buffer is already sent
        CDBG_HIGH("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
          __func__, frame_number, pending.size());

        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
        if (NULL == pStream_Buf) {
            ALOGE("%s: No memory for pending buffers array", __func__);
            return NO_MEMORY;
        }
        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());

        for (size_t j = 0; j < pending.size(); j++) {
            const PendingBufferInfo &info = pending.itemAt(j);
            // One ERROR_BUFFER notify per buffer, before the capture result
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = info.stream;
            notify_msg.message.error.frame_number = frame_number;
            // Fences are consumed/unused at this point, hence -1
            pStream_Buf[j].acquire_fence = -1;
            pStream_Buf[j].release_fence = -1;
            pStream_Buf[j].buffer = info.buffer;
            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
            pStream_Buf[j].stream = info.stream;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            CDBG_HIGH("%s: notify frame_number = %d stream %p", __func__,
                    frame_number, info.stream);
        }

        // Result carries only the errored buffers; metadata was already sent
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = (uint32_t)pending.size();
        result.output_buffers = pStream_Buf;
        mCallbackOps->process_capture_result(mCallbackOps, &result);

        delete [] pStream_Buf;
    }

    CDBG_HIGH("%s:Sending ERROR REQUEST for all pending requests", __func__);

    // Phase 2: regroup the remaining buffers (frame numbers >= frameNum)
    // so each pending request can be failed with ERROR_REQUEST
    flushMap.clear();
    for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end();) {
        ssize_t idx = flushMap.indexOfKey(k->frame_number);
        if (idx == NAME_NOT_FOUND) {
            Vector<PendingBufferInfo> pending;
            pending.add(*k);
            flushMap.add(k->frame_number, pending);
        } else {
            Vector<PendingBufferInfo> &pending =
                    flushMap.editValueFor(k->frame_number);
            pending.add(*k);
        }

        mPendingBuffersMap.num_buffers--;
        k = mPendingBuffersMap.mPendingBufferList.erase(k);
    }

    pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

    // Go through the pending requests info and send error request to framework
    // NOTE(review): this assumes flushMap iteration order matches the order of
    // mPendingRequestsList (both ascending by frame number), since i advances
    // in lockstep with iFlush — confirm if either ordering ever changes.
    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
        uint32_t frame_number = flushMap.keyAt(iFlush);
        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
        CDBG_HIGH("%s:Sending ERROR REQUEST for frame %d",
              __func__, frame_number);

        // Send shutter notify to frameworks
        camera3_notify_msg_t notify_msg;
        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
        notify_msg.type = CAMERA3_MSG_ERROR;
        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
        notify_msg.message.error.error_stream = NULL;
        notify_msg.message.error.frame_number = frame_number;
        mCallbackOps->notify(mCallbackOps, &notify_msg);

        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
        if (NULL == pStream_Buf) {
            ALOGE("%s: No memory for pending buffers array", __func__);
            return NO_MEMORY;
        }
        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());

        for (size_t j = 0; j < pending.size(); j++) {
            const PendingBufferInfo &info = pending.itemAt(j);
            pStream_Buf[j].acquire_fence = -1;
            pStream_Buf[j].release_fence = -1;
            pStream_Buf[j].buffer = info.buffer;
            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
            pStream_Buf[j].stream = info.stream;
        }

        // ERROR_REQUEST result: no metadata, all buffers in error state
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = (uint32_t)pending.size();
        result.output_buffers = pStream_Buf;
        result.result = NULL;
        result.frame_number = frame_number;
        mCallbackOps->process_capture_result(mCallbackOps, &result);
        delete [] pStream_Buf;
        i = erasePendingRequest(i);
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    flushMap.clear();
    mPendingBuffersMap.num_buffers = 0;
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    CDBG_HIGH("%s: Cleared all the pending buffers ", __func__);

    return rc;
}
9450 
/*===========================================================================
 * FUNCTION   : isOnEncoder
 *
 * DESCRIPTION: checks whether the given dimensions exceed the maximum
 *              viewfinder size (name suggests such streams are routed to
 *              the encoder path — confirm against callers)
 *
 * PARAMETERS :
 *   @max_viewfinder_size : maximum supported viewfinder dimensions
 *   @width               : stream width to check
 *   @height              : stream height to check
 *
 * RETURN     : true if width or height is larger than the corresponding
 *              max viewfinder dimension; false otherwise
 *==========================================================================*/
bool QCamera3HardwareInterface::isOnEncoder(
        const cam_dimension_t max_viewfinder_size,
        uint32_t width, uint32_t height)
{
    return (width > (uint32_t)max_viewfinder_size.width ||
            height > (uint32_t)max_viewfinder_size.height);
}
9458 
9459 /*===========================================================================
9460  * FUNCTION   : setBundleInfo
9461  *
9462  * DESCRIPTION: Set bundle info for all streams that are bundle.
9463  *
9464  * PARAMETERS : None
9465  *
9466  * RETURN     : NO_ERROR on success
9467  *              Error codes on failure
9468  *==========================================================================*/
setBundleInfo()9469 int32_t QCamera3HardwareInterface::setBundleInfo()
9470 {
9471     int32_t rc = NO_ERROR;
9472 
9473     if (mChannelHandle) {
9474         cam_bundle_config_t bundleInfo;
9475         memset(&bundleInfo, 0, sizeof(bundleInfo));
9476         rc = mCameraHandle->ops->get_bundle_info(
9477                 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
9478         if (rc != NO_ERROR) {
9479             ALOGE("%s: get_bundle_info failed", __func__);
9480             return rc;
9481         }
9482         if (mAnalysisChannel) {
9483             mAnalysisChannel->setBundleInfo(bundleInfo);
9484         }
9485         if (mSupportChannel) {
9486             mSupportChannel->setBundleInfo(bundleInfo);
9487         }
9488         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9489                 it != mStreamInfo.end(); it++) {
9490             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9491             channel->setBundleInfo(bundleInfo);
9492         }
9493         if (mRawDumpChannel) {
9494             mRawDumpChannel->setBundleInfo(bundleInfo);
9495         }
9496     }
9497 
9498     return rc;
9499 }
9500 
9501 }; //end namespace qcamera
9502