• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
/* Copyright (c) 2012-2015, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 *     * Redistributions of source code must retain the above copyright
7 *       notice, this list of conditions and the following disclaimer.
8 *     * Redistributions in binary form must reproduce the above
9 *       copyright notice, this list of conditions and the following
10 *       disclaimer in the documentation and/or other materials provided
11 *       with the distribution.
12 *     * Neither the name of The Linux Foundation nor the names of its
13 *       contributors may be used to endorse or promote products derived
14 *       from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29 
30 #define ATRACE_TAG ATRACE_TAG_CAMERA
31 #define LOG_TAG "QCamera3HWI"
32 //#define LOG_NDEBUG 0
33 
34 #define __STDC_LIMIT_MACROS
35 #include <cutils/properties.h>
36 #include <hardware/camera3.h>
37 #include <camera/CameraMetadata.h>
38 #include <stdio.h>
39 #include <stdlib.h>
40 #include <fcntl.h>
41 #include <stdint.h>
42 #include <utils/Log.h>
43 #include <utils/Errors.h>
44 #include <utils/Trace.h>
45 #include <sync/sync.h>
46 #include <gralloc_priv.h>
47 #include "util/QCameraFlash.h"
48 #include "QCamera3HWI.h"
49 #include "QCamera3Mem.h"
50 #include "QCamera3Channel.h"
51 #include "QCamera3PostProc.h"
52 #include "QCamera3VendorTags.h"
53 #include "cam_cond.h"
54 
55 using namespace android;
56 
57 namespace qcamera {
58 
59 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
60 
61 #define EMPTY_PIPELINE_DELAY 2
62 #define PARTIAL_RESULT_COUNT 3
63 #define FRAME_SKIP_DELAY     0
64 #define CAM_MAX_SYNC_LATENCY 4
65 
66 #define MAX_VALUE_8BIT ((1<<8)-1)
67 #define MAX_VALUE_10BIT ((1<<10)-1)
68 #define MAX_VALUE_12BIT ((1<<12)-1)
69 
70 #define VIDEO_4K_WIDTH  3840
71 #define VIDEO_4K_HEIGHT 2160
72 
73 #define MAX_EIS_WIDTH 1920
74 #define MAX_EIS_HEIGHT 1080
75 
76 #define MAX_RAW_STREAMS        1
77 #define MAX_STALLING_STREAMS   1
78 #define MAX_PROCESSED_STREAMS  3
79 /* Batch mode is enabled only if FPS set is equal to or greater than this */
80 #define MIN_FPS_FOR_BATCH_MODE (120)
81 #define PREVIEW_FPS_FOR_HFR    (30)
82 #define DEFAULT_VIDEO_FPS      (30.0)
83 #define MAX_HFR_BATCH_SIZE     (8)
84 #define REGIONS_TUPLE_COUNT    5
85 #define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
86 
87 #define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
88 
89 #define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
90                                               CAM_QCOM_FEATURE_CROP |\
91                                               CAM_QCOM_FEATURE_ROTATION |\
92                                               CAM_QCOM_FEATURE_SHARPNESS |\
93                                               CAM_QCOM_FEATURE_SCALE |\
94                                               CAM_QCOM_FEATURE_CAC |\
95                                               CAM_QCOM_FEATURE_CDS )
96 
97 #define TIMEOUT_NEVER -1
98 
99 cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
100 const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
101 static pthread_mutex_t gCamLock = PTHREAD_MUTEX_INITIALIZER;
102 volatile uint32_t gCamHal3LogLevel = 1;
103 
104 const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
105     {"On",  CAM_CDS_MODE_ON},
106     {"Off", CAM_CDS_MODE_OFF},
107     {"Auto",CAM_CDS_MODE_AUTO}
108 };
109 
110 const QCamera3HardwareInterface::QCameraMap<
111         camera_metadata_enum_android_control_effect_mode_t,
112         cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
113     { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
114     { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
115     { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
116     { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
117     { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
118     { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
119     { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
120     { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
121     { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
122 };
123 
124 const QCamera3HardwareInterface::QCameraMap<
125         camera_metadata_enum_android_control_awb_mode_t,
126         cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
127     { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
128     { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
129     { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
130     { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
131     { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
132     { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
133     { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
134     { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
135     { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
136 };
137 
138 const QCamera3HardwareInterface::QCameraMap<
139         camera_metadata_enum_android_control_scene_mode_t,
140         cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
141     { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
142     { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
143     { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
144     { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
145     { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
146     { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
147     { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
148     { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
149     { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
150     { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
151     { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
152     { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
153     { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
154     { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
155     { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
156     { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
157 };
158 
159 const QCamera3HardwareInterface::QCameraMap<
160         camera_metadata_enum_android_control_af_mode_t,
161         cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
162     { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
163     { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
164     { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
165     { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
166     { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
167     { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
168     { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
169 };
170 
171 const QCamera3HardwareInterface::QCameraMap<
172         camera_metadata_enum_android_color_correction_aberration_mode_t,
173         cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
174     { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
175             CAM_COLOR_CORRECTION_ABERRATION_OFF },
176     { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
177             CAM_COLOR_CORRECTION_ABERRATION_FAST },
178     { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
179             CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
180 };
181 
182 const QCamera3HardwareInterface::QCameraMap<
183         camera_metadata_enum_android_control_ae_antibanding_mode_t,
184         cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
185     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
186     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
187     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
188     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
189 };
190 
191 const QCamera3HardwareInterface::QCameraMap<
192         camera_metadata_enum_android_control_ae_mode_t,
193         cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
194     { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
195     { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
196     { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
197     { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
198     { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
199 };
200 
201 const QCamera3HardwareInterface::QCameraMap<
202         camera_metadata_enum_android_flash_mode_t,
203         cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
204     { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
205     { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
206     { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
207 };
208 
209 const QCamera3HardwareInterface::QCameraMap<
210         camera_metadata_enum_android_statistics_face_detect_mode_t,
211         cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
212     { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
213     { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
214     { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
215 };
216 
217 const QCamera3HardwareInterface::QCameraMap<
218         camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
219         cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
220     { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
221       CAM_FOCUS_UNCALIBRATED },
222     { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
223       CAM_FOCUS_APPROXIMATE },
224     { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
225       CAM_FOCUS_CALIBRATED }
226 };
227 
228 const QCamera3HardwareInterface::QCameraMap<
229         camera_metadata_enum_android_lens_state_t,
230         cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
231     { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
232     { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
233 };
234 
235 const int32_t available_thumbnail_sizes[] = {0, 0,
236                                              176, 144,
237                                              320, 240,
238                                              432, 288,
239                                              480, 288,
240                                              512, 288,
241                                              512, 384};
242 
243 const QCamera3HardwareInterface::QCameraMap<
244         camera_metadata_enum_android_sensor_test_pattern_mode_t,
245         cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
246     { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
247     { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
248     { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
249     { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
250     { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
251 };
252 
253 /* Since there is no mapping for all the options some Android enum are not listed.
254  * Also, the order in this list is important because while mapping from HAL to Android it will
255  * traverse from lower to higher index which means that for HAL values that are map to different
256  * Android values, the traverse logic will select the first one found.
257  */
258 const QCamera3HardwareInterface::QCameraMap<
259         camera_metadata_enum_android_sensor_reference_illuminant1_t,
260         cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
261     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
262     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
263     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
264     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
265     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
266     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
267     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
268     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
269     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
270     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
271     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
272     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
273     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
274     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
275     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
276     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
277 };
278 
279 const QCamera3HardwareInterface::QCameraMap<
280         int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
281     { 60, CAM_HFR_MODE_60FPS},
282     { 90, CAM_HFR_MODE_90FPS},
283     { 120, CAM_HFR_MODE_120FPS},
284     { 150, CAM_HFR_MODE_150FPS},
285     { 180, CAM_HFR_MODE_180FPS},
286     { 210, CAM_HFR_MODE_210FPS},
287     { 240, CAM_HFR_MODE_240FPS},
288     { 480, CAM_HFR_MODE_480FPS},
289 };
290 
291 camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
292     .initialize =                         QCamera3HardwareInterface::initialize,
293     .configure_streams =                  QCamera3HardwareInterface::configure_streams,
294     .register_stream_buffers =            NULL,
295     .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
296     .process_capture_request =            QCamera3HardwareInterface::process_capture_request,
297     .get_metadata_vendor_tag_ops =        NULL,
298     .dump =                               QCamera3HardwareInterface::dump,
299     .flush =                              QCamera3HardwareInterface::flush,
300     .reserved =                           {0},
301 };
302 
303 /*===========================================================================
304  * FUNCTION   : QCamera3HardwareInterface
305  *
306  * DESCRIPTION: constructor of QCamera3HardwareInterface
307  *
308  * PARAMETERS :
309  *   @cameraId  : camera ID
310  *
311  * RETURN     : none
312  *==========================================================================*/
QCamera3HardwareInterface(uint32_t cameraId,const camera_module_callbacks_t * callbacks)313 QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
314         const camera_module_callbacks_t *callbacks)
315     : mCameraId(cameraId),
316       mCameraHandle(NULL),
317       mCameraOpened(false),
318       mCameraInitialized(false),
319       mCallbackOps(NULL),
320       mMetadataChannel(NULL),
321       mPictureChannel(NULL),
322       mRawChannel(NULL),
323       mSupportChannel(NULL),
324       mAnalysisChannel(NULL),
325       mRawDumpChannel(NULL),
326       mDummyBatchChannel(NULL),
327       mChannelHandle(0),
328       mFirstRequest(false),
329       mFirstConfiguration(true),
330       mFlush(false),
331       mParamHeap(NULL),
332       mParameters(NULL),
333       mPrevParameters(NULL),
334       m_bIsVideo(false),
335       m_bIs4KVideo(false),
336       m_bEisSupportedSize(false),
337       m_bEisEnable(false),
338       m_MobicatMask(0),
339       mMinProcessedFrameDuration(0),
340       mMinJpegFrameDuration(0),
341       mMinRawFrameDuration(0),
342       mMetaFrameCount(0U),
343       mUpdateDebugLevel(false),
344       mCallbacks(callbacks),
345       mCaptureIntent(0),
346       mHybridAeEnable(0),
347       mBatchSize(0),
348       mToBeQueuedVidBufs(0),
349       mHFRVideoFps(DEFAULT_VIDEO_FPS),
350       mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
351       mFirstFrameNumberInBatch(0),
352       mNeedSensorRestart(false),
353       mLdafCalibExist(false),
354       mPowerHintEnabled(false),
355       mLastCustIntentFrmNum(-1)
356 {
357     getLogLevel();
358     m_perfLock.lock_init();
359     mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
360     mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
361     mCameraDevice.common.close = close_camera_device;
362     mCameraDevice.ops = &mCameraOps;
363     mCameraDevice.priv = this;
364     gCamCapability[cameraId]->version = CAM_HAL_V3;
365     // TODO: hardcode for now until mctl add support for min_num_pp_bufs
366     //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
367     gCamCapability[cameraId]->min_num_pp_bufs = 3;
368 
369     PTHREAD_COND_INIT(&mRequestCond);
370     mPendingLiveRequest = 0;
371     mCurrentRequestId = -1;
372     pthread_mutex_init(&mMutex, NULL);
373 
374     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
375         mDefaultMetadata[i] = NULL;
376 
377     // Getting system props of different kinds
378     char prop[PROPERTY_VALUE_MAX];
379     memset(prop, 0, sizeof(prop));
380     property_get("persist.camera.raw.dump", prop, "0");
381     mEnableRawDump = atoi(prop);
382     if (mEnableRawDump)
383         CDBG("%s: Raw dump from Camera HAL enabled", __func__);
384 
385     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
386     memset(mLdafCalib, 0, sizeof(mLdafCalib));
387 
388     memset(prop, 0, sizeof(prop));
389     property_get("persist.camera.tnr.preview", prop, "1");
390     m_bTnrPreview = (uint8_t)atoi(prop);
391 
392     memset(prop, 0, sizeof(prop));
393     property_get("persist.camera.tnr.video", prop, "1");
394     m_bTnrVideo = (uint8_t)atoi(prop);
395 
396     mPendingBuffersMap.num_buffers = 0;
397     mPendingBuffersMap.last_frame_number = -1;
398 }
399 
400 /*===========================================================================
401  * FUNCTION   : ~QCamera3HardwareInterface
402  *
403  * DESCRIPTION: destructor of QCamera3HardwareInterface
404  *
405  * PARAMETERS : none
406  *
407  * RETURN     : none
408  *==========================================================================*/
~QCamera3HardwareInterface()409 QCamera3HardwareInterface::~QCamera3HardwareInterface()
410 {
411     CDBG("%s: E", __func__);
412     bool hasPendingBuffers = (mPendingBuffersMap.num_buffers > 0);
413 
414     /* Turn off current power hint before acquiring perfLock in case they
415      * conflict with each other */
416     disablePowerHint();
417 
418     m_perfLock.lock_acq();
419 
420     /* We need to stop all streams before deleting any stream */
421     if (mRawDumpChannel) {
422         mRawDumpChannel->stop();
423     }
424 
425     // NOTE: 'camera3_stream_t *' objects are already freed at
426     //        this stage by the framework
427     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
428         it != mStreamInfo.end(); it++) {
429         QCamera3ProcessingChannel *channel = (*it)->channel;
430         if (channel) {
431             channel->stop();
432         }
433     }
434     if (mSupportChannel)
435         mSupportChannel->stop();
436 
437     if (mAnalysisChannel) {
438         mAnalysisChannel->stop();
439     }
440     if (mMetadataChannel) {
441         mMetadataChannel->stop();
442     }
443     if (mChannelHandle) {
444         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
445                 mChannelHandle);
446         ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
447     }
448 
449     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
450         it != mStreamInfo.end(); it++) {
451         QCamera3ProcessingChannel *channel = (*it)->channel;
452         if (channel)
453             delete channel;
454         free (*it);
455     }
456     if (mSupportChannel) {
457         delete mSupportChannel;
458         mSupportChannel = NULL;
459     }
460 
461     if (mAnalysisChannel) {
462         delete mAnalysisChannel;
463         mAnalysisChannel = NULL;
464     }
465     if (mRawDumpChannel) {
466         delete mRawDumpChannel;
467         mRawDumpChannel = NULL;
468     }
469     if (mDummyBatchChannel) {
470         delete mDummyBatchChannel;
471         mDummyBatchChannel = NULL;
472     }
473     mPictureChannel = NULL;
474 
475     if (mMetadataChannel) {
476         delete mMetadataChannel;
477         mMetadataChannel = NULL;
478     }
479 
480     /* Clean up all channels */
481     if (mCameraInitialized) {
482         if(!mFirstConfiguration){
483             clear_metadata_buffer(mParameters);
484 
485             // Check if there is still pending buffer not yet returned.
486             if (hasPendingBuffers) {
487                 for (auto& pendingBuffer : mPendingBuffersMap.mPendingBufferList) {
488                     ALOGE("%s: Buffer not yet returned for stream. Frame number %d, format 0x%x, width %d, height %d",
489                         __func__, pendingBuffer.frame_number, pendingBuffer.stream->format, pendingBuffer.stream->width,
490                         pendingBuffer.stream->height);
491                 }
492                 ALOGE("%s: Last requested frame number is %d", __func__, mPendingBuffersMap.last_frame_number);
493                 uint8_t restart = TRUE;
494                 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_DAEMON_RESTART,
495                         restart);
496             }
497 
498             //send the last unconfigure
499             cam_stream_size_info_t stream_config_info;
500             memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
501             stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
502             stream_config_info.buffer_info.max_buffers =
503                     m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
504             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
505                     stream_config_info);
506 
507             int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
508             if (rc < 0) {
509                 ALOGE("%s: set_parms failed for unconfigure", __func__);
510             }
511         }
512         deinitParameters();
513     }
514 
515     if (mChannelHandle) {
516         mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
517                 mChannelHandle);
518         ALOGE("%s: deleting channel %d", __func__, mChannelHandle);
519         mChannelHandle = 0;
520     }
521 
522     if (mCameraOpened)
523         closeCamera();
524 
525     mPendingBuffersMap.mPendingBufferList.clear();
526     mPendingReprocessResultList.clear();
527     for (pendingRequestIterator i = mPendingRequestsList.begin();
528             i != mPendingRequestsList.end();) {
529         i = erasePendingRequest(i);
530     }
531     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
532         if (mDefaultMetadata[i])
533             free_camera_metadata(mDefaultMetadata[i]);
534 
535     m_perfLock.lock_rel();
536     m_perfLock.lock_deinit();
537 
538     pthread_cond_destroy(&mRequestCond);
539 
540     pthread_mutex_destroy(&mMutex);
541 
542     if (hasPendingBuffers) {
543         ALOGE("%s: Not all buffers were returned. Notified the camera daemon process to restart."
544                 " Exiting here...", __func__);
545         exit(EXIT_FAILURE);
546     }
547     CDBG("%s: X", __func__);
548 }
549 
550 /*===========================================================================
551  * FUNCTION   : erasePendingRequest
552  *
553  * DESCRIPTION: function to erase a desired pending request after freeing any
554  *              allocated memory
555  *
556  * PARAMETERS :
557  *   @i       : iterator pointing to pending request to be erased
558  *
559  * RETURN     : iterator pointing to the next request
560  *==========================================================================*/
561 QCamera3HardwareInterface::pendingRequestIterator
erasePendingRequest(pendingRequestIterator i)562         QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
563 {
564     if (i->input_buffer != NULL) {
565         free(i->input_buffer);
566         i->input_buffer = NULL;
567     }
568     if (i->settings != NULL)
569         free_camera_metadata((camera_metadata_t*)i->settings);
570     return mPendingRequestsList.erase(i);
571 }
572 
573 /*===========================================================================
574  * FUNCTION   : camEvtHandle
575  *
576  * DESCRIPTION: Function registered to mm-camera-interface to handle events
577  *
578  * PARAMETERS :
579  *   @camera_handle : interface layer camera handle
580  *   @evt           : ptr to event
581  *   @user_data     : user data ptr
582  *
583  * RETURN     : none
584  *==========================================================================*/
camEvtHandle(uint32_t,mm_camera_event_t * evt,void * user_data)585 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
586                                           mm_camera_event_t *evt,
587                                           void *user_data)
588 {
589     QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
590     if (obj && evt) {
591         switch(evt->server_event_type) {
592             case CAM_EVENT_TYPE_DAEMON_DIED:
593                 ALOGE("%s: Fatal, camera daemon died", __func__);
594                 //close the camera backend
595                 if (obj->mCameraHandle && obj->mCameraHandle->camera_handle
596                         && obj->mCameraHandle->ops) {
597                     obj->mCameraHandle->ops->error_close_camera(obj->mCameraHandle->camera_handle);
598                 } else {
599                     ALOGE("%s: Could not close camera on error because the handle or ops is NULL",
600                             __func__);
601                 }
602                 camera3_notify_msg_t notify_msg;
603                 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
604                 notify_msg.type = CAMERA3_MSG_ERROR;
605                 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
606                 notify_msg.message.error.error_stream = NULL;
607                 notify_msg.message.error.frame_number = 0;
608                 obj->mCallbackOps->notify(obj->mCallbackOps, &notify_msg);
609                 break;
610 
611             case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
612                 CDBG("%s: HAL got request pull from Daemon", __func__);
613                 pthread_mutex_lock(&obj->mMutex);
614                 obj->mWokenUpByDaemon = true;
615                 obj->unblockRequestIfNecessary();
616                 pthread_mutex_unlock(&obj->mMutex);
617                 break;
618 
619             default:
620                 CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
621                         evt->server_event_type);
622                 break;
623         }
624     } else {
625         ALOGE("%s: NULL user_data/evt", __func__);
626     }
627 }
628 
629 /*===========================================================================
630  * FUNCTION   : openCamera
631  *
632  * DESCRIPTION: open camera
633  *
634  * PARAMETERS :
635  *   @hw_device  : double ptr for camera device struct
636  *
637  * RETURN     : int32_t type of status
638  *              NO_ERROR  -- success
639  *              none-zero failure code
640  *==========================================================================*/
openCamera(struct hw_device_t ** hw_device)641 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
642 {
643     int rc = 0;
644     if (mCameraOpened) {
645         *hw_device = NULL;
646         return PERMISSION_DENIED;
647     }
648     m_perfLock.lock_acq();
649     rc = openCamera();
650     if (rc == 0) {
651         *hw_device = &mCameraDevice.common;
652     } else
653         *hw_device = NULL;
654 
655     m_perfLock.lock_rel();
656     return rc;
657 }
658 
659 /*===========================================================================
660  * FUNCTION   : openCamera
661  *
662  * DESCRIPTION: open camera
663  *
664  * PARAMETERS : none
665  *
666  * RETURN     : int32_t type of status
667  *              NO_ERROR  -- success
668  *              none-zero failure code
669  *==========================================================================*/
openCamera()670 int QCamera3HardwareInterface::openCamera()
671 {
672     int rc = 0;
673 
674     ATRACE_CALL();
675     if (mCameraHandle) {
676         ALOGE("Failure: Camera already opened");
677         return ALREADY_EXISTS;
678     }
679 
680     rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
681     if (rc < 0) {
682         ALOGE("%s: Failed to reserve flash for camera id: %d",
683                 __func__,
684                 mCameraId);
685         return UNKNOWN_ERROR;
686     }
687 
688     rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
689     if (rc) {
690         ALOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
691         return rc;
692     }
693 
694     mCameraOpened = true;
695 
696     rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
697             camEvtHandle, (void *)this);
698 
699     if (rc < 0) {
700         ALOGE("%s: Error, failed to register event callback", __func__);
701         /* Not closing camera here since it is already handled in destructor */
702         return FAILED_TRANSACTION;
703     }
704     mFirstConfiguration = true;
705     return NO_ERROR;
706 }
707 
708 /*===========================================================================
709  * FUNCTION   : closeCamera
710  *
711  * DESCRIPTION: close camera
712  *
713  * PARAMETERS : none
714  *
715  * RETURN     : int32_t type of status
716  *              NO_ERROR  -- success
717  *              none-zero failure code
718  *==========================================================================*/
closeCamera()719 int QCamera3HardwareInterface::closeCamera()
720 {
721     ATRACE_CALL();
722     int rc = NO_ERROR;
723 
724     rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
725     mCameraHandle = NULL;
726     mCameraOpened = false;
727 
728     if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
729         CDBG("%s: Failed to release flash for camera id: %d",
730                 __func__,
731                 mCameraId);
732     }
733 
734     return rc;
735 }
736 
737 /*===========================================================================
738  * FUNCTION   : initialize
739  *
740  * DESCRIPTION: Initialize frameworks callback functions
741  *
742  * PARAMETERS :
743  *   @callback_ops : callback function to frameworks
744  *
745  * RETURN     :
746  *
747  *==========================================================================*/
initialize(const struct camera3_callback_ops * callback_ops)748 int QCamera3HardwareInterface::initialize(
749         const struct camera3_callback_ops *callback_ops)
750 {
751     ATRACE_CALL();
752     int rc;
753 
754     pthread_mutex_lock(&mMutex);
755 
756     rc = initParameters();
757     if (rc < 0) {
758         ALOGE("%s: initParamters failed %d", __func__, rc);
759        goto err1;
760     }
761     mCallbackOps = callback_ops;
762 
763     mChannelHandle = mCameraHandle->ops->add_channel(
764             mCameraHandle->camera_handle, NULL, NULL, this);
765     if (mChannelHandle == 0) {
766         ALOGE("%s: add_channel failed", __func__);
767         rc = -ENOMEM;
768         pthread_mutex_unlock(&mMutex);
769         return rc;
770     }
771 
772     pthread_mutex_unlock(&mMutex);
773     mCameraInitialized = true;
774     return 0;
775 
776 err1:
777     pthread_mutex_unlock(&mMutex);
778     return rc;
779 }
780 
781 /*===========================================================================
782  * FUNCTION   : validateStreamDimensions
783  *
784  * DESCRIPTION: Check if the configuration requested are those advertised
785  *
786  * PARAMETERS :
787  *   @stream_list : streams to be configured
788  *
789  * RETURN     :
790  *
791  *==========================================================================*/
validateStreamDimensions(camera3_stream_configuration_t * streamList)792 int QCamera3HardwareInterface::validateStreamDimensions(
793         camera3_stream_configuration_t *streamList)
794 {
795     int rc = NO_ERROR;
796     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
797     int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
798     size_t count = 0;
799 
800     camera3_stream_t *inputStream = NULL;
801     /*
802     * Loop through all streams to find input stream if it exists*
803     */
804     for (size_t i = 0; i< streamList->num_streams; i++) {
805         if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
806             if (inputStream != NULL) {
807                 ALOGE("%s: Error, Multiple input streams requested");
808                 return -EINVAL;
809             }
810             inputStream = streamList->streams[i];
811         }
812     }
813     /*
814     * Loop through all streams requested in configuration
815     * Check if unsupported sizes have been requested on any of them
816     */
817     for (size_t j = 0; j < streamList->num_streams; j++) {
818         bool sizeFound = false;
819         size_t jpeg_sizes_cnt = 0;
820         camera3_stream_t *newStream = streamList->streams[j];
821 
822         uint32_t rotatedHeight = newStream->height;
823         uint32_t rotatedWidth = newStream->width;
824         if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
825                 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
826             rotatedHeight = newStream->width;
827             rotatedWidth = newStream->height;
828         }
829 
830         /*
831         * Sizes are different for each type of stream format check against
832         * appropriate table.
833         */
834         switch (newStream->format) {
835         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
836         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
837         case HAL_PIXEL_FORMAT_RAW10:
838             count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
839             for (size_t i = 0; i < count; i++) {
840                 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
841                         (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
842                     sizeFound = true;
843                     break;
844                 }
845             }
846             break;
847         case HAL_PIXEL_FORMAT_BLOB:
848             count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
849             /* Generate JPEG sizes table */
850             makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
851                     count,
852                     MAX_SIZES_CNT,
853                     available_processed_sizes);
854             jpeg_sizes_cnt = filterJpegSizes(
855                     available_jpeg_sizes,
856                     available_processed_sizes,
857                     count * 2,
858                     MAX_SIZES_CNT * 2,
859                     gCamCapability[mCameraId]->active_array_size,
860                     gCamCapability[mCameraId]->max_downscale_factor);
861 
862             /* Verify set size against generated sizes table */
863             for (size_t i = 0; i < (jpeg_sizes_cnt / 2); i++) {
864                 if (((int32_t)rotatedWidth == available_jpeg_sizes[i*2]) &&
865                         ((int32_t)rotatedHeight == available_jpeg_sizes[i*2+1])) {
866                     sizeFound = true;
867                     break;
868                 }
869             }
870             break;
871         case HAL_PIXEL_FORMAT_YCbCr_420_888:
872         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
873         default:
874             if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
875                     || newStream->stream_type == CAMERA3_STREAM_INPUT
876                     || IS_USAGE_ZSL(newStream->usage)) {
877                 if (((int32_t)rotatedWidth ==
878                                 gCamCapability[mCameraId]->active_array_size.width) &&
879                                 ((int32_t)rotatedHeight ==
880                                 gCamCapability[mCameraId]->active_array_size.height)) {
881                     sizeFound = true;
882                     break;
883                 }
884                 /* We could potentially break here to enforce ZSL stream
885                  * set from frameworks always is full active array size
886                  * but it is not clear from the spc if framework will always
887                  * follow that, also we have logic to override to full array
888                  * size, so keeping the logic lenient at the moment
889                  */
890             }
891             count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
892                     MAX_SIZES_CNT);
893             for (size_t i = 0; i < count; i++) {
894                 if (((int32_t)rotatedWidth ==
895                             gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
896                             ((int32_t)rotatedHeight ==
897                             gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
898                     sizeFound = true;
899                     break;
900                 }
901             }
902             break;
903         } /* End of switch(newStream->format) */
904 
905         /* We error out even if a single stream has unsupported size set */
906         if (!sizeFound) {
907             ALOGE("%s: Error: Unsupported size of  %d x %d requested for stream"
908                   "type:%d", __func__, rotatedWidth, rotatedHeight,
909                   newStream->format);
910             ALOGE("%s: Active array size is  %d x %d", __func__,
911                     gCamCapability[mCameraId]->active_array_size.width,
912                     gCamCapability[mCameraId]->active_array_size.height);
913             rc = -EINVAL;
914             break;
915         }
916     } /* End of for each stream */
917     return rc;
918 }
919 
920 /*==============================================================================
921  * FUNCTION   : isSupportChannelNeeded
922  *
923  * DESCRIPTION: Simple heuristic func to determine if support channels is needed
924  *
925  * PARAMETERS :
926  *   @stream_list : streams to be configured
927  *   @stream_config_info : the config info for streams to be configured
928  *
929  * RETURN     : Boolen true/false decision
930  *
931  *==========================================================================*/
isSupportChannelNeeded(camera3_stream_configuration_t * streamList,cam_stream_size_info_t stream_config_info)932 bool QCamera3HardwareInterface::isSupportChannelNeeded(
933         camera3_stream_configuration_t *streamList,
934         cam_stream_size_info_t stream_config_info)
935 {
936     uint32_t i;
937     bool pprocRequested = false;
938     /* Check for conditions where PProc pipeline does not have any streams*/
939     for (i = 0; i < stream_config_info.num_streams; i++) {
940         if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
941                 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
942             pprocRequested = true;
943             break;
944         }
945     }
946 
947     if (pprocRequested == false )
948         return true;
949 
950     /* Dummy stream needed if only raw or jpeg streams present */
951     for (i = 0; i < streamList->num_streams; i++) {
952         switch(streamList->streams[i]->format) {
953             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
954             case HAL_PIXEL_FORMAT_RAW10:
955             case HAL_PIXEL_FORMAT_RAW16:
956             case HAL_PIXEL_FORMAT_BLOB:
957                 break;
958             default:
959                 return false;
960         }
961     }
962     return true;
963 }
964 
965 /*==============================================================================
966  * FUNCTION   : getSensorOutputSize
967  *
968  * DESCRIPTION: Get sensor output size based on current stream configuratoin
969  *
970  * PARAMETERS :
971  *   @sensor_dim : sensor output dimension (output)
972  *
973  * RETURN     : int32_t type of status
974  *              NO_ERROR  -- success
975  *              none-zero failure code
976  *
977  *==========================================================================*/
getSensorOutputSize(cam_dimension_t & sensor_dim)978 int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
979 {
980     int32_t rc = NO_ERROR;
981 
982     cam_dimension_t max_dim = {0, 0};
983     for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
984         if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
985             max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
986         if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
987             max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
988     }
989 
990     clear_metadata_buffer(mParameters);
991 
992     rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
993             max_dim);
994     if (rc != NO_ERROR) {
995         ALOGE("%s:Failed to update table for CAM_INTF_PARM_MAX_DIMENSION", __func__);
996         return rc;
997     }
998 
999     rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1000     if (rc != NO_ERROR) {
1001         ALOGE("%s: Failed to set CAM_INTF_PARM_MAX_DIMENSION", __func__);
1002         return rc;
1003     }
1004 
1005     clear_metadata_buffer(mParameters);
1006     ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1007 
1008     rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1009             mParameters);
1010     if (rc != NO_ERROR) {
1011         ALOGE("%s: Failed to get CAM_INTF_PARM_RAW_DIMENSION", __func__);
1012         return rc;
1013     }
1014 
1015     READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1016     ALOGI("%s: sensor output dimension = %d x %d", __func__, sensor_dim.width, sensor_dim.height);
1017 
1018     return rc;
1019 }
1020 
1021 /*==============================================================================
1022  * FUNCTION   : enablePowerHint
1023  *
1024  * DESCRIPTION: enable single powerhint for preview and different video modes.
1025  *
1026  * PARAMETERS :
1027  *
1028  * RETURN     : NULL
1029  *
1030  *==========================================================================*/
enablePowerHint()1031 void QCamera3HardwareInterface::enablePowerHint()
1032 {
1033     if (!mPowerHintEnabled) {
1034         m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 1);
1035         mPowerHintEnabled = true;
1036     }
1037 }
1038 
1039 /*==============================================================================
1040  * FUNCTION   : disablePowerHint
1041  *
1042  * DESCRIPTION: disable current powerhint.
1043  *
1044  * PARAMETERS :
1045  *
1046  * RETURN     : NULL
1047  *
1048  *==========================================================================*/
disablePowerHint()1049 void QCamera3HardwareInterface::disablePowerHint()
1050 {
1051     if (mPowerHintEnabled) {
1052         m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 0);
1053         mPowerHintEnabled = false;
1054     }
1055 }
1056 
1057 /*===========================================================================
1058  * FUNCTION   : configureStreams
1059  *
1060  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1061  *              and output streams.
1062  *
1063  * PARAMETERS :
1064  *   @stream_list : streams to be configured
1065  *
1066  * RETURN     :
1067  *
1068  *==========================================================================*/
configureStreams(camera3_stream_configuration_t * streamList)1069 int QCamera3HardwareInterface::configureStreams(
1070         camera3_stream_configuration_t *streamList)
1071 {
1072     ATRACE_CALL();
1073     int rc = 0;
1074 
1075     // Acquire perfLock before configure streams
1076     m_perfLock.lock_acq();
1077     rc = configureStreamsPerfLocked(streamList);
1078     m_perfLock.lock_rel();
1079 
1080     return rc;
1081 }
1082 
1083 /*===========================================================================
1084  * FUNCTION   : configureStreamsPerfLocked
1085  *
1086  * DESCRIPTION: configureStreams while perfLock is held.
1087  *
1088  * PARAMETERS :
1089  *   @stream_list : streams to be configured
1090  *
1091  * RETURN     : int32_t type of status
1092  *              NO_ERROR  -- success
1093  *              none-zero failure code
1094  *==========================================================================*/
configureStreamsPerfLocked(camera3_stream_configuration_t * streamList)1095 int QCamera3HardwareInterface::configureStreamsPerfLocked(
1096         camera3_stream_configuration_t *streamList)
1097 {
1098     ATRACE_CALL();
1099     int rc = 0;
1100 
1101     // Sanity check stream_list
1102     if (streamList == NULL) {
1103         ALOGE("%s: NULL stream configuration", __func__);
1104         return BAD_VALUE;
1105     }
1106     if (streamList->streams == NULL) {
1107         ALOGE("%s: NULL stream list", __func__);
1108         return BAD_VALUE;
1109     }
1110 
1111     if (streamList->num_streams < 1) {
1112         ALOGE("%s: Bad number of streams requested: %d", __func__,
1113                 streamList->num_streams);
1114         return BAD_VALUE;
1115     }
1116 
1117     if (streamList->num_streams >= MAX_NUM_STREAMS) {
1118         ALOGE("%s: Maximum number of streams %d exceeded: %d", __func__,
1119                 MAX_NUM_STREAMS, streamList->num_streams);
1120         return BAD_VALUE;
1121     }
1122 
1123     mOpMode = streamList->operation_mode;
1124     CDBG("%s: mOpMode: %d", __func__, mOpMode);
1125 
1126     /* first invalidate all the steams in the mStreamList
1127      * if they appear again, they will be validated */
1128     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1129             it != mStreamInfo.end(); it++) {
1130         QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1131         if (channel) {
1132           channel->stop();
1133         }
1134         (*it)->status = INVALID;
1135     }
1136 
1137     if (mRawDumpChannel) {
1138         mRawDumpChannel->stop();
1139         delete mRawDumpChannel;
1140         mRawDumpChannel = NULL;
1141     }
1142 
1143     if (mSupportChannel)
1144         mSupportChannel->stop();
1145 
1146     if (mAnalysisChannel) {
1147         mAnalysisChannel->stop();
1148     }
1149     if (mMetadataChannel) {
1150         /* If content of mStreamInfo is not 0, there is metadata stream */
1151         mMetadataChannel->stop();
1152     }
1153     if (mChannelHandle) {
1154         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1155                 mChannelHandle);
1156         ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
1157     }
1158 
1159     pthread_mutex_lock(&mMutex);
1160 
1161     /* Check whether we have video stream */
1162     m_bIs4KVideo = false;
1163     m_bIsVideo = false;
1164     m_bEisSupportedSize = false;
1165     m_bTnrEnabled = false;
1166     bool isZsl = false;
1167     uint32_t videoWidth = 0U;
1168     uint32_t videoHeight = 0U;
1169     size_t rawStreamCnt = 0;
1170     size_t stallStreamCnt = 0;
1171     size_t processedStreamCnt = 0;
1172     // Number of streams on ISP encoder path
1173     size_t numStreamsOnEncoder = 0;
1174     size_t numYuv888OnEncoder = 0;
1175     bool bYuv888OverrideJpeg = false;
1176     cam_dimension_t largeYuv888Size = {0, 0};
1177     cam_dimension_t maxViewfinderSize = {0, 0};
1178     bool bJpegExceeds4K = false;
1179     bool bJpegOnEncoder = false;
1180     bool bUseCommonFeatureMask = false;
1181     uint32_t commonFeatureMask = 0;
1182     maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1183     camera3_stream_t *inputStream = NULL;
1184     bool isJpeg = false;
1185     cam_dimension_t jpegSize = {0, 0};
1186 
1187     /*EIS configuration*/
1188     bool eisSupported = false;
1189     bool oisSupported = false;
1190     int32_t margin_index = -1;
1191     uint8_t eis_prop_set;
1192     uint32_t maxEisWidth = 0;
1193     uint32_t maxEisHeight = 0;
1194     int32_t hal_version = CAM_HAL_V3;
1195 
1196     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1197 
1198     size_t count = IS_TYPE_MAX;
1199     count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1200     for (size_t i = 0; i < count; i++) {
1201         if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1202             eisSupported = true;
1203             margin_index = (int32_t)i;
1204             break;
1205         }
1206     }
1207 
1208     count = CAM_OPT_STAB_MAX;
1209     count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1210     for (size_t i = 0; i < count; i++) {
1211         if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1212             oisSupported = true;
1213             break;
1214         }
1215     }
1216 
1217     if (eisSupported) {
1218         maxEisWidth = MAX_EIS_WIDTH;
1219         maxEisHeight = MAX_EIS_HEIGHT;
1220     }
1221 
1222     /* EIS setprop control */
1223     char eis_prop[PROPERTY_VALUE_MAX];
1224     memset(eis_prop, 0, sizeof(eis_prop));
1225     property_get("persist.camera.eis.enable", eis_prop, "0");
1226     eis_prop_set = (uint8_t)atoi(eis_prop);
1227 
1228     m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1229             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1230 
1231     /* stream configurations */
1232     for (size_t i = 0; i < streamList->num_streams; i++) {
1233         camera3_stream_t *newStream = streamList->streams[i];
1234         ALOGI("%s: stream[%d] type = %d, format = %d, width = %d, "
1235                 "height = %d, rotation = %d, usage = 0x%x",
1236                 __func__, i, newStream->stream_type, newStream->format,
1237                 newStream->width, newStream->height, newStream->rotation,
1238                 newStream->usage);
1239         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1240                 newStream->stream_type == CAMERA3_STREAM_INPUT){
1241             isZsl = true;
1242         }
1243         if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1244             inputStream = newStream;
1245         }
1246 
1247         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1248             isJpeg = true;
1249             jpegSize.width = newStream->width;
1250             jpegSize.height = newStream->height;
1251             if (newStream->width > VIDEO_4K_WIDTH ||
1252                     newStream->height > VIDEO_4K_HEIGHT)
1253                 bJpegExceeds4K = true;
1254         }
1255 
1256         if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1257                 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1258             m_bIsVideo = true;
1259             videoWidth = newStream->width;
1260             videoHeight = newStream->height;
1261             if ((VIDEO_4K_WIDTH <= newStream->width) &&
1262                     (VIDEO_4K_HEIGHT <= newStream->height)) {
1263                 m_bIs4KVideo = true;
1264             }
1265             m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1266                                   (newStream->height <= maxEisHeight);
1267         }
1268         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1269                 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1270             switch (newStream->format) {
1271             case HAL_PIXEL_FORMAT_BLOB:
1272                 stallStreamCnt++;
1273                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1274                         newStream->height)) {
1275                     commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1276                     numStreamsOnEncoder++;
1277                     bJpegOnEncoder = true;
1278                 }
1279                 break;
1280             case HAL_PIXEL_FORMAT_RAW10:
1281             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1282             case HAL_PIXEL_FORMAT_RAW16:
1283                 rawStreamCnt++;
1284                 break;
1285             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1286                 processedStreamCnt++;
1287                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1288                         newStream->height)) {
1289                     if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1290                             IS_USAGE_ZSL(newStream->usage)) {
1291                         commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1292                     } else {
1293                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1294                     }
1295                     numStreamsOnEncoder++;
1296                 }
1297                 break;
1298             case HAL_PIXEL_FORMAT_YCbCr_420_888:
1299                 processedStreamCnt++;
1300                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1301                         newStream->height)) {
1302                     // If Yuv888 size is not greater than 4K, set feature mask
1303                     // to SUPERSET so that it support concurrent request on
1304                     // YUV and JPEG.
1305                     if (newStream->width <= VIDEO_4K_WIDTH &&
1306                             newStream->height <= VIDEO_4K_HEIGHT) {
1307                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1308                     } else {
1309                         commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1310                     }
1311                     numStreamsOnEncoder++;
1312                     numYuv888OnEncoder++;
1313                     largeYuv888Size.width = newStream->width;
1314                     largeYuv888Size.height = newStream->height;
1315                 }
1316                 break;
1317             default:
1318                 processedStreamCnt++;
1319                 if (isOnEncoder(maxViewfinderSize, newStream->width,
1320                         newStream->height)) {
1321                     commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1322                     numStreamsOnEncoder++;
1323                 }
1324                 break;
1325             }
1326 
1327         }
1328     }
1329 
1330     if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1331         !m_bIsVideo) {
1332         m_bEisEnable = false;
1333     }
1334 
1335     /* Logic to enable/disable TNR based on specific config size/etc.*/
1336     if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1337             ((videoWidth == 1920 && videoHeight == 1080) ||
1338             (videoWidth == 1280 && videoHeight == 720)) &&
1339             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1340         m_bTnrEnabled = true;
1341 
1342     /* Check if num_streams is sane */
1343     if (stallStreamCnt > MAX_STALLING_STREAMS ||
1344             rawStreamCnt > MAX_RAW_STREAMS ||
1345             processedStreamCnt > MAX_PROCESSED_STREAMS) {
1346         ALOGE("%s: Invalid stream configu: stall: %d, raw: %d, processed %d",
1347                 __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
1348         pthread_mutex_unlock(&mMutex);
1349         return -EINVAL;
1350     }
1351     /* Check whether we have zsl stream or 4k video case */
1352     if (isZsl && m_bIsVideo) {
1353         ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
1354         pthread_mutex_unlock(&mMutex);
1355         return -EINVAL;
1356     }
1357     /* Check if stream sizes are sane */
1358     if (numStreamsOnEncoder > 2) {
1359         ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
1360                 __func__);
1361         pthread_mutex_unlock(&mMutex);
1362         return -EINVAL;
1363     } else if (1 < numStreamsOnEncoder){
1364         bUseCommonFeatureMask = true;
1365         CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed",
1366                 __func__);
1367     }
1368 
1369     /* Check if BLOB size is greater than 4k in 4k recording case */
1370     if (m_bIs4KVideo && bJpegExceeds4K) {
1371         ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
1372                 __func__);
1373         pthread_mutex_unlock(&mMutex);
1374         return -EINVAL;
1375     }
1376 
1377     // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1378     // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1379     // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1380     // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1381     // configurations:
1382     //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1383     //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1384     //    (These two configurations will not have CAC2 enabled even in HQ modes.)
1385     if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1386         ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1387                 __func__);
1388         pthread_mutex_unlock(&mMutex);
1389         return -EINVAL;
1390     }
1391 
1392     // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1393     // the YUV stream's size is greater or equal to the JPEG size, set common
1394     // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1395     if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1396             jpegSize.width, jpegSize.height) &&
1397             largeYuv888Size.width > jpegSize.width &&
1398             largeYuv888Size.height > jpegSize.height) {
1399         bYuv888OverrideJpeg = true;
1400     } else if (!isJpeg && numStreamsOnEncoder > 1) {
1401         commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1402     }
1403 
1404     rc = validateStreamDimensions(streamList);
1405     if (rc == NO_ERROR) {
1406         rc = validateStreamRotations(streamList);
1407     }
1408     if (rc != NO_ERROR) {
1409         ALOGE("%s: Invalid stream configuration requested!", __func__);
1410         pthread_mutex_unlock(&mMutex);
1411         return rc;
1412     }
1413 
1414     camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1415     camera3_stream_t *jpegStream = NULL;
1416     for (size_t i = 0; i < streamList->num_streams; i++) {
1417         camera3_stream_t *newStream = streamList->streams[i];
1418         CDBG_HIGH("%s: newStream type = %d, stream format = %d "
1419                 "stream size : %d x %d, stream rotation = %d",
1420                 __func__, newStream->stream_type, newStream->format,
1421                 newStream->width, newStream->height, newStream->rotation);
1422         //if the stream is in the mStreamList validate it
1423         bool stream_exists = false;
1424         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1425                 it != mStreamInfo.end(); it++) {
1426             if ((*it)->stream == newStream) {
1427                 QCamera3ProcessingChannel *channel =
1428                     (QCamera3ProcessingChannel*)(*it)->stream->priv;
1429                 stream_exists = true;
1430                 if (channel)
1431                     delete channel;
1432                 (*it)->status = VALID;
1433                 (*it)->stream->priv = NULL;
1434                 (*it)->channel = NULL;
1435             }
1436         }
1437         if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1438             //new stream
1439             stream_info_t* stream_info;
1440             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1441             if (!stream_info) {
1442                ALOGE("%s: Could not allocate stream info", __func__);
1443                rc = -ENOMEM;
1444                pthread_mutex_unlock(&mMutex);
1445                return rc;
1446             }
1447             stream_info->stream = newStream;
1448             stream_info->status = VALID;
1449             stream_info->channel = NULL;
1450             mStreamInfo.push_back(stream_info);
1451         }
1452         /* Covers Opaque ZSL and API1 F/W ZSL */
1453         if (IS_USAGE_ZSL(newStream->usage)
1454                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1455             if (zslStream != NULL) {
1456                 ALOGE("%s: Multiple input/reprocess streams requested!", __func__);
1457                 pthread_mutex_unlock(&mMutex);
1458                 return BAD_VALUE;
1459             }
1460             zslStream = newStream;
1461         }
1462         /* Covers YUV reprocess */
1463         if (inputStream != NULL) {
1464             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1465                     && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1466                     && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1467                     && inputStream->width == newStream->width
1468                     && inputStream->height == newStream->height) {
1469                 if (zslStream != NULL) {
1470                     /* This scenario indicates multiple YUV streams with same size
1471                      * as input stream have been requested, since zsl stream handle
1472                      * is solely use for the purpose of overriding the size of streams
1473                      * which share h/w streams we will just make a guess here as to
1474                      * which of the stream is a ZSL stream, this will be refactored
1475                      * once we make generic logic for streams sharing encoder output
1476                      */
1477                     CDBG_HIGH("%s: Warning, Multiple ip/reprocess streams requested!", __func__);
1478                 }
1479                 zslStream = newStream;
1480             }
1481         }
1482         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1483             jpegStream = newStream;
1484         }
1485     }
1486 
1487     /* If a zsl stream is set, we know that we have configured at least one input or
1488        bidirectional stream */
1489     if (NULL != zslStream) {
1490         mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1491         mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1492         mInputStreamInfo.format = zslStream->format;
1493         mInputStreamInfo.usage = zslStream->usage;
1494         CDBG("%s: Input stream configured! %d x %d, format %d, usage %d",
1495                 __func__, mInputStreamInfo.dim.width,
1496                 mInputStreamInfo.dim.height,
1497                 mInputStreamInfo.format, mInputStreamInfo.usage);
1498     }
1499 
1500     cleanAndSortStreamInfo();
1501     if (mMetadataChannel) {
1502         delete mMetadataChannel;
1503         mMetadataChannel = NULL;
1504     }
1505     if (mSupportChannel) {
1506         delete mSupportChannel;
1507         mSupportChannel = NULL;
1508     }
1509 
1510     if (mAnalysisChannel) {
1511         delete mAnalysisChannel;
1512         mAnalysisChannel = NULL;
1513     }
1514 
1515     if (mDummyBatchChannel) {
1516         delete mDummyBatchChannel;
1517         mDummyBatchChannel = NULL;
1518     }
1519 
1520     //Create metadata channel and initialize it
1521     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1522                     mChannelHandle, mCameraHandle->ops, captureResultCb,
1523                     &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
1524     if (mMetadataChannel == NULL) {
1525         ALOGE("%s: failed to allocate metadata channel", __func__);
1526         rc = -ENOMEM;
1527         pthread_mutex_unlock(&mMutex);
1528         return rc;
1529     }
1530     rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1531     if (rc < 0) {
1532         ALOGE("%s: metadata channel initialization failed", __func__);
1533         delete mMetadataChannel;
1534         mMetadataChannel = NULL;
1535         pthread_mutex_unlock(&mMutex);
1536         return rc;
1537     }
1538 
1539     // Create analysis stream all the time, even when h/w support is not available
1540     {
1541         mAnalysisChannel = new QCamera3SupportChannel(
1542                 mCameraHandle->camera_handle,
1543                 mChannelHandle,
1544                 mCameraHandle->ops,
1545                 &gCamCapability[mCameraId]->padding_info,
1546                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1547                 CAM_STREAM_TYPE_ANALYSIS,
1548                 &gCamCapability[mCameraId]->analysis_recommended_res,
1549                 gCamCapability[mCameraId]->analysis_recommended_format,
1550                 this,
1551                 0); // force buffer count to 0
1552         if (!mAnalysisChannel) {
1553             ALOGE("%s: H/W Analysis channel cannot be created", __func__);
1554             pthread_mutex_unlock(&mMutex);
1555             return -ENOMEM;
1556         }
1557     }
1558 
1559     bool isRawStreamRequested = false;
1560     memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1561     /* Allocate channel objects for the requested streams */
1562     for (size_t i = 0; i < streamList->num_streams; i++) {
1563         camera3_stream_t *newStream = streamList->streams[i];
1564         uint32_t stream_usage = newStream->usage;
1565         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1566         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1567         if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1568                 || IS_USAGE_ZSL(newStream->usage)) &&
1569             newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1570             mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1571             if (bUseCommonFeatureMask) {
1572                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1573                         commonFeatureMask;
1574             } else {
1575                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1576                         CAM_QCOM_FEATURE_NONE;
1577             }
1578 
1579         } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1580                 CDBG_HIGH("%s: Input stream configured, reprocess config", __func__);
1581         } else {
1582             //for non zsl streams find out the format
1583             switch (newStream->format) {
1584             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1585               {
1586                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1587                          = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1588 
1589                  if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1590 
1591                      mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_VIDEO;
1592                      if (m_bTnrEnabled && m_bTnrVideo) {
1593                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1594                              CAM_QCOM_FEATURE_CPP_TNR;
1595                      }
1596 
1597                  } else {
1598 
1599                      mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_PREVIEW;
1600                      if (m_bTnrEnabled && m_bTnrPreview) {
1601                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1602                              CAM_QCOM_FEATURE_CPP_TNR;
1603                      }
1604                  }
1605 
1606                  if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1607                          (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1608                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1609                              newStream->height;
1610                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1611                              newStream->width;
1612                  }
1613               }
1614               break;
1615            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1616               mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1617               if (isOnEncoder(maxViewfinderSize, newStream->width,
1618                       newStream->height)) {
1619                   if (bUseCommonFeatureMask)
1620                       mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1621                               commonFeatureMask;
1622                   else
1623                       mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1624                               CAM_QCOM_FEATURE_NONE;
1625               } else {
1626                   mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1627                           CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1628               }
1629               break;
1630            case HAL_PIXEL_FORMAT_BLOB:
1631               mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1632               if (m_bIs4KVideo && !isZsl) {
1633                   mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1634                           = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1635               } else {
1636                   if (bUseCommonFeatureMask &&
1637                           isOnEncoder(maxViewfinderSize, newStream->width,
1638                                   newStream->height)) {
1639                       mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1640                   } else {
1641                       mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1642                   }
1643               }
1644               if (isZsl) {
1645                   if (zslStream) {
1646                       mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1647                               (int32_t)zslStream->width;
1648                       mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1649                               (int32_t)zslStream->height;
1650                   } else {
1651                       ALOGE("%s: Error, No ZSL stream identified",__func__);
1652                       pthread_mutex_unlock(&mMutex);
1653                       return -EINVAL;
1654                   }
1655               } else if (m_bIs4KVideo) {
1656                   mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1657                           (int32_t)videoWidth;
1658                   mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1659                           (int32_t)videoHeight;
1660               } else if (bYuv888OverrideJpeg) {
1661                   mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1662                           (int32_t)largeYuv888Size.width;
1663                   mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1664                           (int32_t)largeYuv888Size.height;
1665               }
1666               break;
1667            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1668            case HAL_PIXEL_FORMAT_RAW16:
1669            case HAL_PIXEL_FORMAT_RAW10:
1670               mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1671               isRawStreamRequested = true;
1672               break;
1673            default:
1674               mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1675               mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1676               break;
1677             }
1678 
1679         }
1680 
1681         if (newStream->priv == NULL) {
1682             //New stream, construct channel
1683             switch (newStream->stream_type) {
1684             case CAMERA3_STREAM_INPUT:
1685                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1686                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1687                 break;
1688             case CAMERA3_STREAM_BIDIRECTIONAL:
1689                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1690                     GRALLOC_USAGE_HW_CAMERA_WRITE;
1691                 break;
1692             case CAMERA3_STREAM_OUTPUT:
1693                 /* For video encoding stream, set read/write rarely
1694                  * flag so that they may be set to un-cached */
1695                 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1696                     newStream->usage |=
1697                          (GRALLOC_USAGE_SW_READ_RARELY |
1698                          GRALLOC_USAGE_SW_WRITE_RARELY |
1699                          GRALLOC_USAGE_HW_CAMERA_WRITE);
1700                 else if (IS_USAGE_ZSL(newStream->usage))
1701                     CDBG("%s: ZSL usage flag skipping", __func__);
1702                 else if (newStream == zslStream
1703                         || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1704                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1705                 } else
1706                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1707                 break;
1708             default:
1709                 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
1710                 break;
1711             }
1712 
1713             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1714                     newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1715                 QCamera3ProcessingChannel *channel = NULL;
1716                 switch (newStream->format) {
1717                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1718                     if ((newStream->usage &
1719                             private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1720                             (streamList->operation_mode ==
1721                             CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1722                     ) {
1723                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1724                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
1725                                 &gCamCapability[mCameraId]->padding_info,
1726                                 this,
1727                                 newStream,
1728                                 (cam_stream_type_t)
1729                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1730                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1731                                 mMetadataChannel,
1732                                 0); //heap buffers are not required for HFR video channel
1733                         if (channel == NULL) {
1734                             ALOGE("%s: allocation of channel failed", __func__);
1735                             pthread_mutex_unlock(&mMutex);
1736                             return -ENOMEM;
1737                         }
1738                         //channel->getNumBuffers() will return 0 here so use
1739                         //MAX_INFLIGH_HFR_REQUESTS
1740                         newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
1741                         newStream->priv = channel;
1742                         ALOGI("%s: num video buffers in HFR mode: %d",
1743                                 __func__, MAX_INFLIGHT_HFR_REQUESTS);
1744                     } else {
1745                         /* Copy stream contents in HFR preview only case to create
1746                          * dummy batch channel so that sensor streaming is in
1747                          * HFR mode */
1748                         if (!m_bIsVideo && (streamList->operation_mode ==
1749                                 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
1750                             mDummyBatchStream = *newStream;
1751                         }
1752                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1753                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
1754                                 &gCamCapability[mCameraId]->padding_info,
1755                                 this,
1756                                 newStream,
1757                                 (cam_stream_type_t)
1758                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1759                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1760                                 mMetadataChannel,
1761                                 MAX_INFLIGHT_REQUESTS);
1762                         if (channel == NULL) {
1763                             ALOGE("%s: allocation of channel failed", __func__);
1764                             pthread_mutex_unlock(&mMutex);
1765                             return -ENOMEM;
1766                         }
1767                         newStream->max_buffers = channel->getNumBuffers();
1768                         newStream->priv = channel;
1769                     }
1770                     break;
1771                 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
1772                     channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
1773                             mChannelHandle,
1774                             mCameraHandle->ops, captureResultCb,
1775                             &gCamCapability[mCameraId]->padding_info,
1776                             this,
1777                             newStream,
1778                             (cam_stream_type_t)
1779                                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1780                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1781                             mMetadataChannel);
1782                     if (channel == NULL) {
1783                         ALOGE("%s: allocation of YUV channel failed", __func__);
1784                         pthread_mutex_unlock(&mMutex);
1785                         return -ENOMEM;
1786                     }
1787                     newStream->max_buffers = channel->getNumBuffers();
1788                     newStream->priv = channel;
1789                     break;
1790                 }
1791                 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1792                 case HAL_PIXEL_FORMAT_RAW16:
1793                 case HAL_PIXEL_FORMAT_RAW10:
1794                     mRawChannel = new QCamera3RawChannel(
1795                             mCameraHandle->camera_handle, mChannelHandle,
1796                             mCameraHandle->ops, captureResultCb,
1797                             &gCamCapability[mCameraId]->padding_info,
1798                             this, newStream, CAM_QCOM_FEATURE_NONE,
1799                             mMetadataChannel,
1800                             (newStream->format == HAL_PIXEL_FORMAT_RAW16));
1801                     if (mRawChannel == NULL) {
1802                         ALOGE("%s: allocation of raw channel failed", __func__);
1803                         pthread_mutex_unlock(&mMutex);
1804                         return -ENOMEM;
1805                     }
1806                     newStream->max_buffers = mRawChannel->getNumBuffers();
1807                     newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
1808                     break;
1809                 case HAL_PIXEL_FORMAT_BLOB:
1810                     // Max live snapshot inflight buffer is 1. This is to mitigate
1811                     // frame drop issues for video snapshot. The more buffers being
1812                     // allocated, the more frame drops there are.
1813                     mPictureChannel = new QCamera3PicChannel(
1814                             mCameraHandle->camera_handle, mChannelHandle,
1815                             mCameraHandle->ops, captureResultCb,
1816                             &gCamCapability[mCameraId]->padding_info, this, newStream,
1817                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1818                             m_bIs4KVideo, isZsl, mMetadataChannel,
1819                             (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
1820                     if (mPictureChannel == NULL) {
1821                         ALOGE("%s: allocation of channel failed", __func__);
1822                         pthread_mutex_unlock(&mMutex);
1823                         return -ENOMEM;
1824                     }
1825                     newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
1826                     newStream->max_buffers = mPictureChannel->getNumBuffers();
1827                     mPictureChannel->overrideYuvSize(
1828                             mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
1829                             mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
1830                     break;
1831 
1832                 default:
1833                     ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
1834                     break;
1835                 }
1836             } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
1837                 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
1838             } else {
1839                 ALOGE("%s: Error, Unknown stream type", __func__);
1840                 return -EINVAL;
1841             }
1842 
1843             for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1844                     it != mStreamInfo.end(); it++) {
1845                 if ((*it)->stream == newStream) {
1846                     (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
1847                     break;
1848                 }
1849             }
1850         } else {
1851             // Channel already exists for this stream
1852             // Do nothing for now
1853         }
1854 
1855     /* Do not add entries for input stream in metastream info
1856          * since there is no real stream associated with it
1857          */
1858         if (newStream->stream_type != CAMERA3_STREAM_INPUT)
1859             mStreamConfigInfo.num_streams++;
1860     }
1861 
1862     //RAW DUMP channel
1863     if (mEnableRawDump && isRawStreamRequested == false){
1864         cam_dimension_t rawDumpSize;
1865         rawDumpSize = getMaxRawSize(mCameraId);
1866         mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
1867                                   mChannelHandle,
1868                                   mCameraHandle->ops,
1869                                   rawDumpSize,
1870                                   &gCamCapability[mCameraId]->padding_info,
1871                                   this, CAM_QCOM_FEATURE_NONE);
1872         if (!mRawDumpChannel) {
1873             ALOGE("%s: Raw Dump channel cannot be created", __func__);
1874             pthread_mutex_unlock(&mMutex);
1875             return -ENOMEM;
1876         }
1877     }
1878 
1879 
1880     if (mAnalysisChannel) {
1881         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1882                 gCamCapability[mCameraId]->analysis_recommended_res;
1883         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1884                 CAM_STREAM_TYPE_ANALYSIS;
1885         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1886                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1887         mStreamConfigInfo.num_streams++;
1888     }
1889 
1890     if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
1891         mSupportChannel = new QCamera3SupportChannel(
1892                 mCameraHandle->camera_handle,
1893                 mChannelHandle,
1894                 mCameraHandle->ops,
1895                 &gCamCapability[mCameraId]->padding_info,
1896                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1897                 CAM_STREAM_TYPE_CALLBACK,
1898                 &QCamera3SupportChannel::kDim,
1899                 CAM_FORMAT_YUV_420_NV21,
1900                 this);
1901         if (!mSupportChannel) {
1902             ALOGE("%s: dummy channel cannot be created", __func__);
1903             pthread_mutex_unlock(&mMutex);
1904             return -ENOMEM;
1905         }
1906     }
1907 
1908     if (mSupportChannel) {
1909         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1910                 QCamera3SupportChannel::kDim;
1911         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1912                 CAM_STREAM_TYPE_CALLBACK;
1913         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1914                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1915         mStreamConfigInfo.num_streams++;
1916     }
1917 
1918     if (mRawDumpChannel) {
1919         cam_dimension_t rawSize;
1920         rawSize = getMaxRawSize(mCameraId);
1921         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1922                 rawSize;
1923         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1924                 CAM_STREAM_TYPE_RAW;
1925         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1926                 CAM_QCOM_FEATURE_NONE;
1927         mStreamConfigInfo.num_streams++;
1928     }
1929     /* In HFR mode, if video stream is not added, create a dummy channel so that
1930      * ISP can create a batch mode even for preview only case. This channel is
1931      * never 'start'ed (no stream-on), it is only 'initialized'  */
1932     if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1933             !m_bIsVideo) {
1934         mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1935                 mChannelHandle,
1936                 mCameraHandle->ops, captureResultCb,
1937                 &gCamCapability[mCameraId]->padding_info,
1938                 this,
1939                 &mDummyBatchStream,
1940                 CAM_STREAM_TYPE_VIDEO,
1941                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1942                 mMetadataChannel);
1943         if (NULL == mDummyBatchChannel) {
1944             ALOGE("%s: creation of mDummyBatchChannel failed."
1945                     "Preview will use non-hfr sensor mode ", __func__);
1946         }
1947     }
1948     if (mDummyBatchChannel) {
1949         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1950                 mDummyBatchStream.width;
1951         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1952                 mDummyBatchStream.height;
1953         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1954                 CAM_STREAM_TYPE_VIDEO;
1955         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1956                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1957         mStreamConfigInfo.num_streams++;
1958     }
1959 
1960     mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
1961     mStreamConfigInfo.buffer_info.max_buffers =
1962             m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
1963 
1964     /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
1965     for (pendingRequestIterator i = mPendingRequestsList.begin();
1966             i != mPendingRequestsList.end();) {
1967         i = erasePendingRequest(i);
1968     }
1969     mPendingFrameDropList.clear();
1970     // Initialize/Reset the pending buffers list
1971     mPendingBuffersMap.num_buffers = 0;
1972     mPendingBuffersMap.mPendingBufferList.clear();
1973     mPendingReprocessResultList.clear();
1974 
1975     mFirstRequest = true;
1976     mCurJpegMeta.clear();
1977     //Get min frame duration for this streams configuration
1978     deriveMinFrameDuration();
1979 
1980     /* Turn on video hint only if video stream is configured */
1981 
1982     pthread_mutex_unlock(&mMutex);
1983 
1984     return rc;
1985 }
1986 
1987 /*===========================================================================
1988  * FUNCTION   : validateCaptureRequest
1989  *
1990  * DESCRIPTION: validate a capture request from camera service
1991  *
1992  * PARAMETERS :
1993  *   @request : request from framework to process
1994  *
 1995  * RETURN     : NO_ERROR  if the request is valid
 1996  *              BAD_VALUE if the request is NULL, has no settings on the
 1997  *                        first request, has no/too many output buffers, or
 1998  *                        contains an invalid input/output buffer
1997  *==========================================================================*/
validateCaptureRequest(camera3_capture_request_t * request)1998 int QCamera3HardwareInterface::validateCaptureRequest(
1999                     camera3_capture_request_t *request)
2000 {
2001     ssize_t idx = 0;
2002     const camera3_stream_buffer_t *b;
2003     CameraMetadata meta;
2004 
2005     /* Sanity check the request */
2006     if (request == NULL) {
2007         ALOGE("%s: NULL capture request", __func__);
2008         return BAD_VALUE;
2009     }
2010 
2011     if (request->settings == NULL && mFirstRequest) {
2012         /*settings cannot be null for the first request*/
2013         return BAD_VALUE;
2014     }
2015 
2016     uint32_t frameNumber = request->frame_number;
2017     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2018         ALOGE("%s: Request %d: No output buffers provided!",
2019                 __FUNCTION__, frameNumber);
2020         return BAD_VALUE;
2021     }
2022     if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2023         ALOGE("%s: Number of buffers %d equals or is greater than maximum number of streams!",
2024                 __func__, request->num_output_buffers, MAX_NUM_STREAMS);
2025         return BAD_VALUE;
2026     }
2027     if (request->input_buffer != NULL) {
2028         b = request->input_buffer;
2029         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2030             ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
2031                     __func__, frameNumber, (long)idx);
2032             return BAD_VALUE;
2033         }
2034         if (b->release_fence != -1) {
2035             ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
2036                     __func__, frameNumber, (long)idx);
2037             return BAD_VALUE;
2038         }
2039         if (b->buffer == NULL) {
2040             ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
2041                     __func__, frameNumber, (long)idx);
2042             return BAD_VALUE;
2043         }
2044     }
2045 
2046     // Validate all buffers
2047     b = request->output_buffers;
2048     do {
2049         QCamera3ProcessingChannel *channel =
2050                 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2051         if (channel == NULL) {
2052             ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
2053                     __func__, frameNumber, (long)idx);
2054             return BAD_VALUE;
2055         }
2056         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2057             ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
2058                     __func__, frameNumber, (long)idx);
2059             return BAD_VALUE;
2060         }
2061         if (b->release_fence != -1) {
2062             ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
2063                     __func__, frameNumber, (long)idx);
2064             return BAD_VALUE;
2065         }
2066         if (b->buffer == NULL) {
2067             ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
2068                     __func__, frameNumber, (long)idx);
2069             return BAD_VALUE;
2070         }
2071         if (*(b->buffer) == NULL) {
2072             ALOGE("%s: Request %d: Buffer %ld: NULL private handle!",
2073                     __func__, frameNumber, (long)idx);
2074             return BAD_VALUE;
2075         }
2076         idx++;
2077         b = request->output_buffers + idx;
2078     } while (idx < (ssize_t)request->num_output_buffers);
2079 
2080     return NO_ERROR;
2081 }
2082 
2083 /*===========================================================================
2084  * FUNCTION   : deriveMinFrameDuration
2085  *
 2086  * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2087  *              on currently configured streams.
2088  *
2089  * PARAMETERS : NONE
2090  *
2091  * RETURN     : NONE
2092  *
2093  *==========================================================================*/
deriveMinFrameDuration()2094 void QCamera3HardwareInterface::deriveMinFrameDuration()
2095 {
2096     int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2097 
2098     maxJpegDim = 0;
2099     maxProcessedDim = 0;
2100     maxRawDim = 0;
2101 
2102     // Figure out maximum jpeg, processed, and raw dimensions
2103     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2104         it != mStreamInfo.end(); it++) {
2105 
2106         // Input stream doesn't have valid stream_type
2107         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2108             continue;
2109 
2110         int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2111         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2112             if (dimension > maxJpegDim)
2113                 maxJpegDim = dimension;
2114         } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2115                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2116                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2117             if (dimension > maxRawDim)
2118                 maxRawDim = dimension;
2119         } else {
2120             if (dimension > maxProcessedDim)
2121                 maxProcessedDim = dimension;
2122         }
2123     }
2124 
2125     size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2126             MAX_SIZES_CNT);
2127 
2128     //Assume all jpeg dimensions are in processed dimensions.
2129     if (maxJpegDim > maxProcessedDim)
2130         maxProcessedDim = maxJpegDim;
2131     //Find the smallest raw dimension that is greater or equal to jpeg dimension
2132     if (maxProcessedDim > maxRawDim) {
2133         maxRawDim = INT32_MAX;
2134 
2135         for (size_t i = 0; i < count; i++) {
2136             int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2137                     gCamCapability[mCameraId]->raw_dim[i].height;
2138             if (dimension >= maxProcessedDim && dimension < maxRawDim)
2139                 maxRawDim = dimension;
2140         }
2141     }
2142 
2143     //Find minimum durations for processed, jpeg, and raw
2144     for (size_t i = 0; i < count; i++) {
2145         if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2146                 gCamCapability[mCameraId]->raw_dim[i].height) {
2147             mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2148             break;
2149         }
2150     }
2151     count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2152     for (size_t i = 0; i < count; i++) {
2153         if (maxProcessedDim ==
2154                 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2155                 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2156             mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2157             mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2158             break;
2159         }
2160     }
2161 }
2162 
2163 /*===========================================================================
2164  * FUNCTION   : getMinFrameDuration
2165  *
 * DESCRIPTION: get minimum frame duration based on the currently derived
 *              minimum frame durations and the current request configuration.
2167  *              and current request configuration.
2168  *
 * PARAMETERS : @request: request sent by the frameworks
2170  *
 * RETURN     : min frame duration for a particular request
2172  *
2173  *==========================================================================*/
getMinFrameDuration(const camera3_capture_request_t * request)2174 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2175 {
2176     bool hasJpegStream = false;
2177     bool hasRawStream = false;
2178     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2179         const camera3_stream_t *stream = request->output_buffers[i].stream;
2180         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2181             hasJpegStream = true;
2182         else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2183                 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2184                 stream->format == HAL_PIXEL_FORMAT_RAW16)
2185             hasRawStream = true;
2186     }
2187 
2188     if (!hasJpegStream)
2189         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2190     else
2191         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2192 }
2193 
2194 /*===========================================================================
2195  * FUNCTION   : handlePendingReprocResults
2196  *
2197  * DESCRIPTION: check and notify on any pending reprocess results
2198  *
2199  * PARAMETERS :
2200  *   @frame_number   : Pending request frame number
2201  *
2202  * RETURN     : int32_t type of status
2203  *              NO_ERROR  -- success
2204  *              none-zero failure code
2205  *==========================================================================*/
handlePendingReprocResults(uint32_t frame_number)2206 int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
2207 {
2208     for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
2209             j != mPendingReprocessResultList.end(); j++) {
2210         if (j->frame_number == frame_number) {
2211             mCallbackOps->notify(mCallbackOps, &j->notify_msg);
2212 
2213             CDBG("%s: Delayed reprocess notify %d", __func__,
2214                     frame_number);
2215 
2216             for (pendingRequestIterator k = mPendingRequestsList.begin();
2217                     k != mPendingRequestsList.end(); k++) {
2218 
2219                 if (k->frame_number == j->frame_number) {
2220                     CDBG("%s: Found reprocess frame number %d in pending reprocess List "
2221                             "Take it out!!", __func__,
2222                             k->frame_number);
2223 
2224                     camera3_capture_result result;
2225                     memset(&result, 0, sizeof(camera3_capture_result));
2226                     result.frame_number = frame_number;
2227                     result.num_output_buffers = 1;
2228                     result.output_buffers =  &j->buffer;
2229                     result.input_buffer = k->input_buffer;
2230                     result.result = k->settings;
2231                     result.partial_result = PARTIAL_RESULT_COUNT;
2232                     mCallbackOps->process_capture_result(mCallbackOps, &result);
2233 
2234                     erasePendingRequest(k);
2235                     break;
2236                 }
2237             }
2238             mPendingReprocessResultList.erase(j);
2239             break;
2240         }
2241     }
2242     return NO_ERROR;
2243 }
2244 
2245 /*===========================================================================
2246  * FUNCTION   : handleBatchMetadata
2247  *
2248  * DESCRIPTION: Handles metadata buffer callback in batch mode
2249  *
2250  * PARAMETERS : @metadata_buf: metadata buffer
2251  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2252  *                 the meta buf in this method
2253  *
2254  * RETURN     :
2255  *
2256  *==========================================================================*/
handleBatchMetadata(mm_camera_super_buf_t * metadata_buf,bool free_and_bufdone_meta_buf)2257 void QCamera3HardwareInterface::handleBatchMetadata(
2258         mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2259 {
2260     ATRACE_CALL();
2261 
2262     if (NULL == metadata_buf) {
2263         ALOGE("%s: metadata_buf is NULL", __func__);
2264         return;
2265     }
2266     /* In batch mode, the metdata will contain the frame number and timestamp of
2267      * the last frame in the batch. Eg: a batch containing buffers from request
2268      * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
2269      * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
2270      * multiple process_capture_results */
2271     metadata_buffer_t *metadata =
2272             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2273     int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2274     uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
2275     uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
2276     uint32_t frame_number = 0, urgent_frame_number = 0;
2277     int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2278     bool invalid_metadata = false;
2279     size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2280     size_t loopCount = 1;
2281 
2282     int32_t *p_frame_number_valid =
2283             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2284     uint32_t *p_frame_number =
2285             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2286     int64_t *p_capture_time =
2287             POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2288     int32_t *p_urgent_frame_number_valid =
2289             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2290     uint32_t *p_urgent_frame_number =
2291             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2292 
2293     if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
2294             (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
2295             (NULL == p_urgent_frame_number)) {
2296         ALOGE("%s: Invalid metadata", __func__);
2297         invalid_metadata = true;
2298     } else {
2299         frame_number_valid = *p_frame_number_valid;
2300         last_frame_number = *p_frame_number;
2301         last_frame_capture_time = *p_capture_time;
2302         urgent_frame_number_valid = *p_urgent_frame_number_valid;
2303         last_urgent_frame_number = *p_urgent_frame_number;
2304     }
2305 
2306     /* In batchmode, when no video buffers are requested, set_parms are sent
2307      * for every capture_request. The difference between consecutive urgent
2308      * frame numbers and frame numbers should be used to interpolate the
2309      * corresponding frame numbers and time stamps */
2310     pthread_mutex_lock(&mMutex);
2311     if (urgent_frame_number_valid) {
2312         first_urgent_frame_number =
2313                 mPendingBatchMap.valueFor(last_urgent_frame_number);
2314         urgentFrameNumDiff = last_urgent_frame_number + 1 -
2315                 first_urgent_frame_number;
2316 
2317         CDBG_HIGH("%s: urgent_frm: valid: %d frm_num: %d - %d",
2318                 __func__, urgent_frame_number_valid,
2319                 first_urgent_frame_number, last_urgent_frame_number);
2320     }
2321 
2322     if (frame_number_valid) {
2323         first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
2324         frameNumDiff = last_frame_number + 1 -
2325                 first_frame_number;
2326         mPendingBatchMap.removeItem(last_frame_number);
2327 
2328         CDBG_HIGH("%s:        frm: valid: %d frm_num: %d - %d",
2329                 __func__, frame_number_valid,
2330                 first_frame_number, last_frame_number);
2331 
2332     }
2333     pthread_mutex_unlock(&mMutex);
2334 
2335     if (urgent_frame_number_valid || frame_number_valid) {
2336         loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
2337         if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
2338             ALOGE("%s: urgentFrameNumDiff: %d urgentFrameNum: %d",
2339                     __func__, urgentFrameNumDiff, last_urgent_frame_number);
2340         if (frameNumDiff > MAX_HFR_BATCH_SIZE)
2341             ALOGE("%s: frameNumDiff: %d frameNum: %d",
2342                     __func__, frameNumDiff, last_frame_number);
2343     }
2344 
2345     for (size_t i = 0; i < loopCount; i++) {
2346         /* handleMetadataWithLock is called even for invalid_metadata for
2347          * pipeline depth calculation */
2348         if (!invalid_metadata) {
2349             /* Infer frame number. Batch metadata contains frame number of the
2350              * last frame */
2351             if (urgent_frame_number_valid) {
2352                 if (i < urgentFrameNumDiff) {
2353                     urgent_frame_number =
2354                             first_urgent_frame_number + i;
2355                     CDBG("%s: inferred urgent frame_number: %d",
2356                             __func__, urgent_frame_number);
2357                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2358                             CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
2359                 } else {
2360                     /* This is to handle when urgentFrameNumDiff < frameNumDiff */
2361                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2362                             CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
2363                 }
2364             }
2365 
2366             /* Infer frame number. Batch metadata contains frame number of the
2367              * last frame */
2368             if (frame_number_valid) {
2369                 if (i < frameNumDiff) {
2370                     frame_number = first_frame_number + i;
2371                     CDBG("%s: inferred frame_number: %d", __func__, frame_number);
2372                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2373                             CAM_INTF_META_FRAME_NUMBER, frame_number);
2374                 } else {
2375                     /* This is to handle when urgentFrameNumDiff > frameNumDiff */
2376                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2377                              CAM_INTF_META_FRAME_NUMBER_VALID, 0);
2378                 }
2379             }
2380 
2381             if (last_frame_capture_time) {
2382                 //Infer timestamp
2383                 first_frame_capture_time = last_frame_capture_time -
2384                         (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
2385                 capture_time =
2386                         first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
2387                 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2388                         CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
2389                 CDBG_HIGH("%s: batch capture_time: %lld, capture_time: %lld",
2390                         __func__, last_frame_capture_time, capture_time);
2391             }
2392         }
2393         pthread_mutex_lock(&mMutex);
2394         handleMetadataWithLock(metadata_buf,
2395                 false /* free_and_bufdone_meta_buf */,
2396                 (i == 0) /* first metadata in the batch metadata */);
2397         pthread_mutex_unlock(&mMutex);
2398     }
2399 
2400 done_batch_metadata:
2401     /* BufDone metadata buffer */
2402     if (free_and_bufdone_meta_buf) {
2403         mMetadataChannel->bufDone(metadata_buf);
2404         free(metadata_buf);
2405     }
2406 }
2407 
2408 /*===========================================================================
2409  * FUNCTION   : handleMetadataWithLock
2410  *
2411  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2412  *
2413  * PARAMETERS : @metadata_buf: metadata buffer
2414  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2415  *                 the meta buf in this method
2416  *              @firstMetadataInBatch: Boolean to indicate whether this is the
2417  *                  first metadata in a batch. Valid only for batch mode
2418  *
2419  * RETURN     :
2420  *
2421  *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
    bool firstMetadataInBatch)
{
    ATRACE_CALL();

    // Caller must hold mMutex (see function name / batch-mode caller which
    // takes the lock around each invocation).
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t frame_number, urgent_frame_number;
    int64_t capture_time;
    bool unfinished_raw_request = false;

    // Pull the bookkeeping entries out of the vendor metadata blob. Any of
    // these may be NULL if the entry is absent.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    // NOTE: IF_META_AVAILABLE declares p_cam_frame_drop in this scope; it is
    // consulted again in the flush loop below for per-stream drop reporting.
    IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
            metadata) {
        CDBG("%s: Dropped frame info for frame_number_valid %d, frame_number %d",
                __func__, *p_frame_number_valid, *p_frame_number);
    }

    // Without the bookkeeping entries we cannot match this metadata to a
    // request; release the buffer (if asked) and still bump pipeline depth.
    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
        ALOGE("%s: Invalid metadata", __func__);
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    } else {
        frame_number_valid = *p_frame_number_valid;
        frame_number = *p_frame_number;
        capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        urgent_frame_number = *p_urgent_frame_number;
    }
    //Partial result on process_capture_result for timestamp
    if (urgent_frame_number_valid) {
        CDBG("%s: valid urgent frame_number = %u, capture_time = %lld",
          __func__, urgent_frame_number, capture_time);

        //Received an urgent Frame Number, handle it
        //using partial results
        for (pendingRequestIterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            CDBG("%s: Iterator Frame = %d urgent frame = %d",
                __func__, i->frame_number, urgent_frame_number);

            // An older non-reprocess request that never got its urgent
            // (3A) partial result means the HAL skipped one — log only.
            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
                (i->partial_result_cnt == 0)) {
                ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
                    __func__, i->frame_number);
            }

            // Deliver the 3A-only partial result exactly once per request.
            if (i->frame_number == urgent_frame_number &&
                     i->bUrgentReceived == 0) {

                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));

                i->partial_result_cnt++;
                i->bUrgentReceived = 1;
                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: urgent frame_number = %u, capture_time = %lld",
                     __func__, result.frame_number, capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    // SOF-only metadata: no normal frame number, nothing further to match.
    if (!frame_number_valid) {
        CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    CDBG_HIGH("%s: valid frame_number = %u, capture_time = %lld", __func__,
            frame_number, capture_time);

    // Walk pending requests in order, completing (or error-reporting) every
    // entry whose frame number is <= the one this metadata belongs to.
    // Iterator advance happens inside the body (erase vs. ++).
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        // Flush out all entries with less or equal frame numbers.

        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));

        CDBG("%s: frame_number in the list is %u", __func__, i->frame_number);

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then send the ERROR_BUFFER for the corresponding stream
        // The API does not expect a blob buffer to be dropped
        if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
                    if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                        ALOGW("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
                                __func__, i->frame_number, streamID, j->stream->format);
                        notify_msg.type = CAMERA3_MSG_ERROR;
                        notify_msg.message.error.frame_number = i->frame_number;
                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                        notify_msg.message.error.error_stream = j->stream;
                        mCallbackOps->notify(mCallbackOps, &notify_msg);
                        ALOGW("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
                                __func__, i->frame_number, streamID, j->stream->format);
                        PendingFrameDropInfo PendingFrameDrop;
                        PendingFrameDrop.frame_number=i->frame_number;
                        PendingFrameDrop.stream_ID = streamID;
                        // Add the Frame drop info to mPendingFrameDropList
                        // so the buffer is flagged STATUS_ERROR when returned.
                        mPendingFrameDropList.push_back(PendingFrameDrop);
                   }
               }
            }
        }

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        /* we could hit this case when we either
         * 1. have a pending reprocess request or
         * 2. miss a metadata buffer callback */
        if (i->frame_number < frame_number) {
            if (i->input_buffer) {
                /* this will be handled in handleInputBufferWithLock */
                i++;
                continue;
            } else if (i->need_dynamic_blklvl) {
                // A raw request still awaiting its black-level data; leave it
                // in the list and remember that one is outstanding.
                unfinished_raw_request = true;
                // i->partial_result_cnt--;
                CDBG("%s, frame number:%d, partial_result:%d, unfinished raw request..",
                        __func__, i->frame_number, i->partial_result_cnt);
                i++;
                continue;
            } else if (i->pending_extra_result) {
                // Already sent a partial result; final result comes later.
                CDBG("%s, frame_number:%d, partial_result:%d, need_dynamic_blklvl:%d",
                        __func__, i->frame_number, i->partial_result_cnt,
                        i->need_dynamic_blklvl);
                // i->partial_result_cnt--;
                i++;
                continue;
            } else {
                // Genuinely missed metadata: report ERROR_RESULT with a
                // dummy metadata carrying only the request id.
                ALOGE("%s: Missing metadata buffer for frame number %d, reporting CAMERA3_MSG_ERROR_RESULT",
                     __func__, i->frame_number);

                mPendingLiveRequest--;

                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
                result.result = dummyMetadata.release();

                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(notify_msg));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
                notify_msg.message.error.error_stream = NULL;
                notify_msg.message.error.frame_number = i->frame_number;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
            }
        } else {
            // This metadata matches the request: build and send the result.
            i->partial_result_cnt++;
            CDBG("%s, frame_number:%d, need_dynamic_blklvl:%d, partial cnt:%d\n",
                    __func__, i->frame_number, i->need_dynamic_blklvl,
                    i->partial_result_cnt);
            if (!i->need_dynamic_blklvl) {
                CDBG("%s, meta for request without raw, frame number: %d\n",
                        __func__, i->frame_number);
                if (!unfinished_raw_request) {
                    // No raw data pending — bump the count so this becomes
                    // the final partial result (checked against
                    // PARTIAL_RESULT_COUNT at the bottom of the loop).
                    i->partial_result_cnt++;
                    CDBG("%s, no raw request pending, send the final (cnt:%d) partial result",
                            __func__, i->partial_result_cnt);
                }
            }

            result.partial_result = i->partial_result_cnt;

            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            i->timestamp = capture_time;

            // Find channel requiring metadata, meaning internal offline postprocess
            // is needed.
            //TODO: for now, we don't support two streams requiring metadata at the same time.
            // (because we are not making copies, and metadata buffer is not reference counted.
            bool internalPproc = false;
            for (pendingBufferIterator iter = i->buffers.begin();
                    iter != i->buffers.end(); iter++) {
                if (iter->need_metadata) {
                    internalPproc = true;
                    QCamera3ProcessingChannel *channel =
                            (QCamera3ProcessingChannel *)iter->stream->priv;
                    // Channel takes over the metadata buffer; do NOT bufDone
                    // it below in this case.
                    channel->queueReprocMetadata(metadata_buf);
                    break;
                }
            }

            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
                    i->capture_intent, i->hybrid_ae_enable, internalPproc, i->need_dynamic_blklvl,
                    firstMetadataInBatch);

            saveExifParams(metadata);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }
            }

            if (!internalPproc) {
                CDBG("%s: couldn't find need_metadata for this metadata", __func__);
                // Return metadata buffer
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }
        }
        if (!result.result) {
            ALOGE("%s: metadata is NULL", __func__);
        }
        result.frame_number = i->frame_number;
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count the output buffers that have already been filled for this
        // request; they are delivered together with the metadata.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            if (!result_buffers) {
                ALOGE("%s: Fatal error: out of memory", __func__);
            }
            size_t result_buffers_idx = 0;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    // If this stream/frame was flagged as dropped earlier,
                    // mark the buffer STATUS_ERROR and clear the flag.
                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                            m != mPendingFrameDropList.end(); m++) {
                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                        if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                            ALOGW("%s: Stream STATUS_ERROR frame_number=%u, streamID=%u",
                                  __func__, frame_number, streamID);
                            m = mPendingFrameDropList.erase(m);
                            break;
                        }
                    }

                    // Remove the buffer from the global pending-buffer map.
                    for (List<PendingBufferInfo>::iterator k =
                      mPendingBuffersMap.mPendingBufferList.begin();
                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
                      if (k->buffer == j->buffer->buffer) {
                        CDBG("%s: Found buffer %p in pending buffer List "
                              "for frame %u, Take it out!!", __func__,
                               k->buffer, k->frame_number);
                        mPendingBuffersMap.num_buffers--;
                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
                        break;
                      }
                    }

                    result_buffers[result_buffers_idx++] = *(j->buffer);
                    free(j->buffer);
                    j->buffer = NULL;
                }
            }
            result.output_buffers = result_buffers;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            CDBG("%s %d: meta frame_number = %u, capture_time = %lld, partial:%d",
                    __func__, __LINE__, result.frame_number, i->timestamp, result.partial_result);
            free_camera_metadata((camera_metadata_t *)result.result);
            delete[] result_buffers;
        } else {
            // Metadata-only result (no filled buffers for this request yet).
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            CDBG("%s %d: meta frame_number = %u, capture_time = %lld, partial:%d",
                        __func__, __LINE__, result.frame_number, i->timestamp, result.partial_result);
            free_camera_metadata((camera_metadata_t *)result.result);
        }

        // Retire the request only once all partial results have been sent;
        // otherwise keep it and mark it as awaiting its final result.
        if (i->partial_result_cnt == PARTIAL_RESULT_COUNT) {
            mPendingLiveRequest--;
            i = erasePendingRequest(i);
        } else {
            CDBG("%s, keep in list, frame number:%d, partial result:%d",
                    __func__, i->frame_number, i->partial_result_cnt);
            i->pending_extra_result = true;
            i++;
        }

        // A reprocess result for the next frame may now be deliverable.
        if (!mPendingReprocessResultList.empty()) {
            handlePendingReprocResults(frame_number + 1);
        }

    }

done_metadata:
    // Every still-pending request has now seen one more metadata callback.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
    unblockRequestIfNecessary();

}
2773 
2774 /*===========================================================================
2775  * FUNCTION   : hdrPlusPerfLock
2776  *
2777  * DESCRIPTION: perf lock for HDR+ using custom intent
2778  *
2779  * PARAMETERS : @metadata_buf: Metadata super_buf pointer
2780  *
2781  * RETURN     : None
2782  *
2783  *==========================================================================*/
hdrPlusPerfLock(mm_camera_super_buf_t * metadata_buf)2784 void QCamera3HardwareInterface::hdrPlusPerfLock(
2785         mm_camera_super_buf_t *metadata_buf)
2786 {
2787     if (NULL == metadata_buf) {
2788         ALOGE("%s: metadata_buf is NULL", __func__);
2789         return;
2790     }
2791     metadata_buffer_t *metadata =
2792             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2793     int32_t *p_frame_number_valid =
2794             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2795     uint32_t *p_frame_number =
2796             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2797 
2798     //acquire perf lock for 5 sec after the last HDR frame is captured
2799     if (*p_frame_number_valid) {
2800         if (mLastCustIntentFrmNum == (int32_t)*p_frame_number) {
2801             m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
2802         }
2803     }
2804 
2805     //release lock after perf lock timer is expired. If lock is already released,
2806     //isTimerReset returns false
2807     if (m_perfLock.isTimerReset()) {
2808         mLastCustIntentFrmNum = -1;
2809         m_perfLock.lock_rel_timed();
2810     }
2811 }
2812 
2813 /*===========================================================================
2814  * FUNCTION   : handleInputBufferWithLock
2815  *
2816  * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
2817  *
2818  * PARAMETERS : @frame_number: frame number of the input buffer
2819  *
2820  * RETURN     :
2821  *
2822  *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CALL();
    // Locate the pending (reprocess) request matching this frame number.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        // Send the shutter notify exactly once per request, before the
        // capture result below (the framework requires notify first).
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            // Prefer the sensor timestamp carried in the request settings;
            // fall back to the current monotonic time if it is missing.
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    ALOGE("%s: No timestamp in input settings! Using current one.",
                            __func__);
                }
            } else {
                ALOGE("%s: Input settings missing!", __func__);
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->shutter_notified = true;
            CDBG("%s: Input request metadata notify frame_number = %u, capture_time = %llu",
                       __func__, i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait on (and then close) the input buffer's release fence so the
        // producer is done with it before we hand the result back.
        if (i->input_buffer->release_fence != -1) {
           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
           close(i->input_buffer->release_fence);
           if (rc != OK) {
               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
           }
        }

        // Return the settings and the input buffer as a complete
        // (final-partial) result with no output buffers.
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        mCallbackOps->process_capture_result(mCallbackOps, &result);
        CDBG("%s: Input request metadata and input buffer frame_number = %u",
                       __func__, i->frame_number);
        // The request is fully satisfied; drop it from the pending list.
        i = erasePendingRequest(i);
    } else {
        ALOGE("%s: Could not find input request for frame number %d", __func__, frame_number);
    }
}
2881 
getBlackLevelRegion(int (& opticalBlackRegions)[4])2882 bool QCamera3HardwareInterface::getBlackLevelRegion(int (&opticalBlackRegions)[4])
2883 {
2884     if (gCamCapability[mCameraId]->optical_black_region_count > 0) {
2885         /*just calculate one region black level and send to fwk*/
2886         for (size_t i = 0; i <  4; i++) {
2887             opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
2888         }
2889         return TRUE;
2890     }
2891 
2892     return FALSE;
2893 }
2894 
// Thread-safe wrapper: acquires mMutex and forwards to
// sendDynamicBlackLevelWithLock, which does the actual result dispatch.
void QCamera3HardwareInterface::sendDynamicBlackLevel(float blacklevel[4], uint32_t frame_number)
{
    CDBG("%s, E.\n", __func__);
    pthread_mutex_lock(&mMutex);
    sendDynamicBlackLevelWithLock(blacklevel, frame_number);
    pthread_mutex_unlock(&mMutex);
    CDBG("%s, X.\n", __func__);
}
2903 
// Sends the per-frame dynamic black level to the framework as a partial
// metadata result, then flushes any subsequent pending requests that were
// only waiting for their final partial. Caller must hold mMutex.
//   @blacklevel  : 4-entry dynamic black level values for this frame
//   @frame_number: frame the black level belongs to
void QCamera3HardwareInterface::sendDynamicBlackLevelWithLock(float blacklevel[4], uint32_t frame_number)
{
    CDBG("%s, E. frame_number:%d\n", __func__, frame_number);

    // Find the pending request for this frame; it must have been flagged as
    // needing a dynamic black level, otherwise the call is ignored.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if ((i == mPendingRequestsList.end()) || !i->need_dynamic_blklvl) {
        ALOGE("%s, error: invalid frame number.", __func__);
        return;
    }

    // This dispatch counts as one more partial result for the request.
    i->partial_result_cnt++;

    CameraMetadata camMetadata;
    int64_t fwk_frame_number = (int64_t)frame_number;
    camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);

    // update dynamic black level here
    camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, blacklevel, 4);

    camera3_capture_result_t result;
    memset(&result, 0, sizeof(camera3_capture_result_t));
    result.frame_number = frame_number;
    result.num_output_buffers = 0;
    // release() transfers ownership of the metadata buffer to us; it is
    // freed below once the callback returns.
    result.result = camMetadata.release();
    result.partial_result = i->partial_result_cnt;

    CDBG("%s, partial result:%d, frame_number:%d, pending extra result:%d\n",
            __func__, result.partial_result, frame_number, i->pending_extra_result);
    mCallbackOps->process_capture_result(mCallbackOps, &result);
    free_camera_metadata((camera_metadata_t *)result.result);

    // If this was the final partial, retire the request and then walk the
    // remaining list in order: later requests that already received all but
    // their last partial (and do NOT need a black level of their own) can
    // now be completed with an empty final result.
    if (i->partial_result_cnt == PARTIAL_RESULT_COUNT) {
        CDBG("%s, remove cur request from pending list.", __func__);
        mPendingLiveRequest--;
        i = erasePendingRequest(i);

        // traverse the remaining pending list to see whether need to send cached ones..
        while (i != mPendingRequestsList.end()) {
            CDBG("%s, frame number:%d, partial_result:%d, pending extra result:%d",
                    __func__, i->frame_number, i->partial_result_cnt,
                    i->pending_extra_result);

            if ((i->partial_result_cnt == PARTIAL_RESULT_COUNT - 1)
                    && (i->need_dynamic_blklvl == false) /* in case two consecutive raw requests */) {
                // send out final result, and remove it from pending list.
                CameraMetadata camMetadata;
                int64_t fwk_frame_number = (int64_t)i->frame_number;
                camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);

                memset(&result, 0, sizeof(camera3_capture_result_t));
                result.frame_number = i->frame_number;
                result.num_output_buffers = 0;
                result.result = camMetadata.release();
                result.partial_result = i->partial_result_cnt + 1;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                free_camera_metadata((camera_metadata_t *)result.result);

                mPendingLiveRequest--;
                i = erasePendingRequest(i);
                CDBG("%s, mPendingLiveRequest:%d, pending list size:%d",
                        __func__, mPendingLiveRequest, mPendingRequestsList.size());
            } else {
                // Stop at the first request that is still genuinely pending.
                break;
            }
        }
    }

    // A request may have completed; wake any blocked processCaptureRequest.
    unblockRequestIfNecessary();
    CDBG("%s, X.mPendingLiveRequest = %d\n", __func__, mPendingLiveRequest);
}
2978 
2979 
2980 /*===========================================================================
2981  * FUNCTION   : handleBufferWithLock
2982  *
2983  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
2984  *
2985  * PARAMETERS : @buffer: image buffer for the callback
2986  *              @frame_number: frame number of the image buffer
2987  *
2988  * RETURN     :
2989  *
2990  *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end() || i->pending_extra_result == true) {
        if (i != mPendingRequestsList.end()) {
            // though the pendingRequestInfo is still in the list,
            // still send the buffer directly, as the pending_extra_result is true,
            // and we've already received meta for this frame number.
            CDBG("%s, send the buffer directly, frame number:%d",
                    __func__, i->frame_number);
        }
        // Verify all pending requests frame_numbers are greater
        // (an older live request still pending indicates out-of-order
        // completion, which we only log here).
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                ALOGE("%s: Error: pending live frame number %d is smaller than %d",
                        __func__, j->frame_number, frame_number);
            }
        }
        // Buffer-only result: no metadata, partial_result stays 0.
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this (stream, frame) pair was flagged for a frame drop, mark
        // the buffer as errored and retire the drop entry.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        CDBG_HIGH("%s: result frame_number = %d, buffer = %p",
                __func__, frame_number, buffer->buffer);

        // The buffer is leaving the HAL; drop it from the pending buffer map.
        for (List<PendingBufferInfo>::iterator k =
                mPendingBuffersMap.mPendingBufferList.begin();
                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer->buffer) {
                CDBG("%s: Found Frame buffer, take it out from list",
                        __func__);

                mPendingBuffersMap.num_buffers--;
                k = mPendingBuffersMap.mPendingBufferList.erase(k);
                break;
            }
        }
        CDBG("%s: mPendingBuffersMap.num_buffers = %d",
            __func__, mPendingBuffersMap.num_buffers);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: build the shutter notify from the input
            // settings' sensor timestamp (fall back to current time).
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    ALOGE("%s: No timestamp in input settings! Using current one.",
                            __func__);
                }
            } else {
                ALOGE("%s: Input settings missing!", __func__);
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Wait on and close the input buffer's release fence before
            // returning the result.
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
               }
            }

            // The output buffer is leaving the HAL; remove it from the
            // pending buffer map.
            for (List<PendingBufferInfo>::iterator k =
                    mPendingBuffersMap.mPendingBufferList.begin();
                    k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
                if (k->buffer == buffer->buffer) {
                    CDBG("%s: Found Frame buffer, take it out from list",
                            __func__);

                    mPendingBuffersMap.num_buffers--;
                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
                    break;
                }
            }
            CDBG("%s: mPendingBuffersMap.num_buffers = %d",
                __func__, mPendingBuffersMap.num_buffers);

            // Only deliver now if no earlier frame is still pending;
            // otherwise cache the result to keep delivery in frame order.
            bool notifyNow = true;
            for (pendingRequestIterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    notifyNow = false;
                    break;
                }
            }

            if (notifyNow) {
                camera3_capture_result result;
                memset(&result, 0, sizeof(camera3_capture_result));
                result.frame_number = frame_number;
                result.result = i->settings;
                result.input_buffer = i->input_buffer;
                result.num_output_buffers = 1;
                result.output_buffers = buffer;
                result.partial_result = PARTIAL_RESULT_COUNT;

                // notify must precede process_capture_result.
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
                i = erasePendingRequest(i);
            } else {
                // Cache reprocess result for later
                // (delivered by handlePendingReprocResults once earlier
                // frames complete).
                PendingReprocessResult pendingResult;
                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
                pendingResult.notify_msg = notify_msg;
                pendingResult.buffer = *buffer;
                pendingResult.frame_number = frame_number;
                mPendingReprocessResultList.push_back(pendingResult);
                CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
            }
        } else {
            // Normal request whose metadata hasn't arrived yet: cache a copy
            // of the buffer on the matching stream entry; it is sent together
            // with the metadata later (freed there).
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        CDBG_HIGH("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
}
3152 
3153 /*===========================================================================
3154  * FUNCTION   : unblockRequestIfNecessary
3155  *
3156  * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3157  *              that mMutex is held when this function is called.
3158  *
3159  * PARAMETERS :
3160  *
3161  * RETURN     :
3162  *
3163  *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
   // Unblock process_capture_request: signal (not broadcast) the request
   // condition so a blocked caller re-checks whether it may proceed.
   // NOTE(review): per the header comment, mMutex must be held by the caller.
   pthread_cond_signal(&mRequestCond);
}
3169 
3170 
3171 /*===========================================================================
3172  * FUNCTION   : processCaptureRequest
3173  *
3174  * DESCRIPTION: process a capture request from camera service
3175  *
3176  * PARAMETERS :
3177  *   @request : request from framework to process
3178  *
3179  * RETURN     :
3180  *
3181  *==========================================================================*/
processCaptureRequest(camera3_capture_request_t * request)3182 int QCamera3HardwareInterface::processCaptureRequest(
3183                     camera3_capture_request_t *request)
3184 {
3185     ATRACE_CALL();
3186     int rc = NO_ERROR;
3187     int32_t request_id;
3188     CameraMetadata meta;
3189     uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
3190     uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3191     bool isVidBufRequested = false;
3192     camera3_stream_buffer_t *pInputBuffer = NULL;
3193 
3194     pthread_mutex_lock(&mMutex);
3195 
3196     rc = validateCaptureRequest(request);
3197     if (rc != NO_ERROR) {
3198         ALOGE("%s: incoming request is not valid", __func__);
3199         pthread_mutex_unlock(&mMutex);
3200         return rc;
3201     }
3202 
3203     meta = request->settings;
3204 
3205     // For first capture request, send capture intent, and
3206     // stream on all streams
3207     if (mFirstRequest) {
3208         // send an unconfigure to the backend so that the isp
3209         // resources are deallocated
3210         if (!mFirstConfiguration) {
3211             cam_stream_size_info_t stream_config_info;
3212             int32_t hal_version = CAM_HAL_V3;
3213             memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3214             stream_config_info.buffer_info.min_buffers =
3215                     MIN_INFLIGHT_REQUESTS;
3216             stream_config_info.buffer_info.max_buffers =
3217                     m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3218             clear_metadata_buffer(mParameters);
3219             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3220                     CAM_INTF_PARM_HAL_VERSION, hal_version);
3221             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3222                     CAM_INTF_META_STREAM_INFO, stream_config_info);
3223             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3224                     mParameters);
3225             if (rc < 0) {
3226                 ALOGE("%s: set_parms for unconfigure failed", __func__);
3227                 pthread_mutex_unlock(&mMutex);
3228                 return rc;
3229             }
3230         }
3231         m_perfLock.lock_acq();
3232         /* get eis information for stream configuration */
3233         cam_is_type_t is_type;
3234         char is_type_value[PROPERTY_VALUE_MAX];
3235         property_get("persist.camera.is_type", is_type_value, "0");
3236         is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3237 
3238         if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3239             int32_t hal_version = CAM_HAL_V3;
3240             uint8_t captureIntent =
3241                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3242             mCaptureIntent = captureIntent;
3243             clear_metadata_buffer(mParameters);
3244             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3245             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3246         }
3247 
3248         //If EIS is enabled, turn it on for video
3249         bool setEis = m_bEisEnable && m_bEisSupportedSize;
3250         int32_t vsMode;
3251         vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3252         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3253             rc = BAD_VALUE;
3254         }
3255 
3256         //IS type will be 0 unless EIS is supported. If EIS is supported
3257         //it could either be 1 or 4 depending on the stream and video size
3258         if (setEis) {
3259             if (!m_bEisSupportedSize) {
3260                 is_type = IS_TYPE_DIS;
3261             } else {
3262                 is_type = IS_TYPE_EIS_2_0;
3263             }
3264             mStreamConfigInfo.is_type = is_type;
3265         } else {
3266             mStreamConfigInfo.is_type = IS_TYPE_NONE;
3267         }
3268 
3269         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3270                 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3271         int32_t tintless_value = 1;
3272         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3273                 CAM_INTF_PARM_TINTLESS, tintless_value);
3274         //Disable CDS for HFR mode and if mPprocBypass = true.
3275         //CDS is a session parameter in the backend/ISP, so need to be set/reset
3276         //after every configure_stream
3277         if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3278                 (m_bIsVideo)) {
3279             int32_t cds = CAM_CDS_MODE_OFF;
3280             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3281                     CAM_INTF_PARM_CDS_MODE, cds))
3282                 ALOGE("%s: Failed to disable CDS for HFR mode", __func__);
3283 
3284         }
3285         setMobicat();
3286 
3287         /* Set fps and hfr mode while sending meta stream info so that sensor
3288          * can configure appropriate streaming mode */
3289         mHFRVideoFps = DEFAULT_VIDEO_FPS;
3290         if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3291             rc = setHalFpsRange(meta, mParameters);
3292             if (rc != NO_ERROR) {
3293                 ALOGE("%s: setHalFpsRange failed", __func__);
3294             }
3295         }
3296         if (meta.exists(ANDROID_CONTROL_MODE)) {
3297             uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3298             rc = extractSceneMode(meta, metaMode, mParameters);
3299             if (rc != NO_ERROR) {
3300                 ALOGE("%s: extractSceneMode failed", __func__);
3301             }
3302         }
3303 
3304         //TODO: validate the arguments, HSV scenemode should have only the
3305         //advertised fps ranges
3306 
3307         /*set the capture intent, hal version, tintless, stream info,
3308          *and disenable parameters to the backend*/
3309         CDBG("%s: set_parms META_STREAM_INFO ", __func__ );
3310         mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3311                     mParameters);
3312 
3313         cam_dimension_t sensor_dim;
3314         memset(&sensor_dim, 0, sizeof(sensor_dim));
3315         rc = getSensorOutputSize(sensor_dim);
3316         if (rc != NO_ERROR) {
3317             ALOGE("%s: Failed to get sensor output size", __func__);
3318             pthread_mutex_unlock(&mMutex);
3319             goto error_exit;
3320         }
3321 
3322         mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3323                 gCamCapability[mCameraId]->active_array_size.height,
3324                 sensor_dim.width, sensor_dim.height);
3325 
3326         /* Set batchmode before initializing channel. Since registerBuffer
3327          * internally initializes some of the channels, better set batchmode
3328          * even before first register buffer */
3329         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3330             it != mStreamInfo.end(); it++) {
3331             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3332             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3333                     && mBatchSize) {
3334                 rc = channel->setBatchSize(mBatchSize);
3335                 //Disable per frame map unmap for HFR/batchmode case
3336                 rc |= channel->setPerFrameMapUnmap(false);
3337                 if (NO_ERROR != rc) {
3338                     ALOGE("%s : Channel init failed %d", __func__, rc);
3339                     pthread_mutex_unlock(&mMutex);
3340                     goto error_exit;
3341                 }
3342             }
3343         }
3344 
3345         //First initialize all streams
3346         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3347             it != mStreamInfo.end(); it++) {
3348             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3349             if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3350                ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3351                setEis)
3352                 rc = channel->initialize(is_type);
3353             else {
3354                 rc = channel->initialize(IS_TYPE_NONE);
3355             }
3356             if (NO_ERROR != rc) {
3357                 ALOGE("%s : Channel initialization failed %d", __func__, rc);
3358                 pthread_mutex_unlock(&mMutex);
3359                 goto error_exit;
3360             }
3361         }
3362 
3363         if (mRawDumpChannel) {
3364             rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3365             if (rc != NO_ERROR) {
3366                 ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
3367                 pthread_mutex_unlock(&mMutex);
3368                 goto error_exit;
3369             }
3370         }
3371         if (mSupportChannel) {
3372             rc = mSupportChannel->initialize(IS_TYPE_NONE);
3373             if (rc < 0) {
3374                 ALOGE("%s: Support channel initialization failed", __func__);
3375                 pthread_mutex_unlock(&mMutex);
3376                 goto error_exit;
3377             }
3378         }
3379         if (mAnalysisChannel) {
3380             rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3381             if (rc < 0) {
3382                 ALOGE("%s: Analysis channel initialization failed", __func__);
3383                 pthread_mutex_unlock(&mMutex);
3384                 goto error_exit;
3385             }
3386         }
3387         if (mDummyBatchChannel) {
3388             rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3389             if (rc < 0) {
3390                 ALOGE("%s: mDummyBatchChannel setBatchSize failed", __func__);
3391                 pthread_mutex_unlock(&mMutex);
3392                 goto error_exit;
3393             }
3394             rc = mDummyBatchChannel->initialize(is_type);
3395             if (rc < 0) {
3396                 ALOGE("%s: mDummyBatchChannel initialization failed", __func__);
3397                 pthread_mutex_unlock(&mMutex);
3398                 goto error_exit;
3399             }
3400         }
3401 
3402         // Set bundle info
3403         rc = setBundleInfo();
3404         if (rc < 0) {
3405             ALOGE("%s: setBundleInfo failed %d", __func__, rc);
3406             pthread_mutex_unlock(&mMutex);
3407             goto error_exit;
3408         }
3409 
3410         //Then start them.
3411         CDBG_HIGH("%s: Start META Channel", __func__);
3412         rc = mMetadataChannel->start();
3413         if (rc < 0) {
3414             ALOGE("%s: META channel start failed", __func__);
3415             pthread_mutex_unlock(&mMutex);
3416             goto error_exit;
3417         }
3418 
3419         if (mAnalysisChannel) {
3420             rc = mAnalysisChannel->start();
3421             if (rc < 0) {
3422                 ALOGE("%s: Analysis channel start failed", __func__);
3423                 mMetadataChannel->stop();
3424                 pthread_mutex_unlock(&mMutex);
3425                 goto error_exit;
3426             }
3427         }
3428 
3429         if (mSupportChannel) {
3430             rc = mSupportChannel->start();
3431             if (rc < 0) {
3432                 ALOGE("%s: Support channel start failed", __func__);
3433                 mMetadataChannel->stop();
3434                 /* Although support and analysis are mutually exclusive today
3435                    adding it in anycase for future proofing */
3436                 if (mAnalysisChannel) {
3437                     mAnalysisChannel->stop();
3438                 }
3439                 pthread_mutex_unlock(&mMutex);
3440                 goto error_exit;
3441             }
3442         }
3443         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3444             it != mStreamInfo.end(); it++) {
3445             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3446             CDBG_HIGH("%s: Start Processing Channel mask=%d",
3447                     __func__, channel->getStreamTypeMask());
3448             rc = channel->start();
3449             if (rc < 0) {
3450                 ALOGE("%s: channel start failed", __func__);
3451                 pthread_mutex_unlock(&mMutex);
3452                 goto error_exit;
3453             }
3454         }
3455 
3456         if (mRawDumpChannel) {
3457             CDBG("%s: Starting raw dump stream",__func__);
3458             rc = mRawDumpChannel->start();
3459             if (rc != NO_ERROR) {
3460                 ALOGE("%s: Error Starting Raw Dump Channel", __func__);
3461                 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3462                       it != mStreamInfo.end(); it++) {
3463                     QCamera3Channel *channel =
3464                         (QCamera3Channel *)(*it)->stream->priv;
3465                     ALOGE("%s: Stopping Processing Channel mask=%d", __func__,
3466                         channel->getStreamTypeMask());
3467                     channel->stop();
3468                 }
3469                 if (mSupportChannel)
3470                     mSupportChannel->stop();
3471                 if (mAnalysisChannel) {
3472                     mAnalysisChannel->stop();
3473                 }
3474                 mMetadataChannel->stop();
3475                 pthread_mutex_unlock(&mMutex);
3476                 goto error_exit;
3477             }
3478         }
3479 
3480         if (mChannelHandle) {
3481 
3482             rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3483                     mChannelHandle);
3484             if (rc != NO_ERROR) {
3485                 ALOGE("%s: start_channel failed %d", __func__, rc);
3486                 pthread_mutex_unlock(&mMutex);
3487                 goto error_exit;
3488             }
3489         }
3490 
3491 
3492         goto no_error;
3493 error_exit:
3494         m_perfLock.lock_rel();
3495         return rc;
3496 no_error:
3497         m_perfLock.lock_rel();
3498 
3499         mWokenUpByDaemon = false;
3500         mPendingLiveRequest = 0;
3501         mFirstConfiguration = false;
3502         enablePowerHint();
3503     }
3504 
3505     uint32_t frameNumber = request->frame_number;
3506     cam_stream_ID_t streamID;
3507 
3508     if (meta.exists(ANDROID_REQUEST_ID)) {
3509         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3510         mCurrentRequestId = request_id;
3511         CDBG("%s: Received request with id: %d",__func__, request_id);
3512     } else if (mFirstRequest || mCurrentRequestId == -1){
3513         ALOGE("%s: Unable to find request id field, \
3514                 & no previous id available", __func__);
3515         pthread_mutex_unlock(&mMutex);
3516         return NAME_NOT_FOUND;
3517     } else {
3518         CDBG("%s: Re-using old request id", __func__);
3519         request_id = mCurrentRequestId;
3520     }
3521 
3522     CDBG_HIGH("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
3523                                     __func__, __LINE__,
3524                                     request->num_output_buffers,
3525                                     request->input_buffer,
3526                                     frameNumber);
3527     // Acquire all request buffers first
3528     streamID.num_streams = 0;
3529     int blob_request = 0;
3530     uint32_t snapshotStreamId = 0;
3531     for (size_t i = 0; i < request->num_output_buffers; i++) {
3532         const camera3_stream_buffer_t& output = request->output_buffers[i];
3533         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3534 
3535         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3536             //Call function to store local copy of jpeg data for encode params.
3537             blob_request = 1;
3538             snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3539         }
3540 
3541         if (output.acquire_fence != -1) {
3542            rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3543            close(output.acquire_fence);
3544            if (rc != OK) {
3545               ALOGE("%s: sync wait failed %d", __func__, rc);
3546               pthread_mutex_unlock(&mMutex);
3547               return rc;
3548            }
3549         }
3550 
3551         streamID.streamID[streamID.num_streams] =
3552             channel->getStreamID(channel->getStreamTypeMask());
3553         streamID.num_streams++;
3554 
3555         if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3556             isVidBufRequested = true;
3557         }
3558     }
3559 
3560     if (blob_request && mRawDumpChannel) {
3561         CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
3562         streamID.streamID[streamID.num_streams] =
3563             mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3564         streamID.num_streams++;
3565     }
3566 
3567     if(request->input_buffer == NULL) {
3568         /* Parse the settings:
3569          * - For every request in NORMAL MODE
3570          * - For every request in HFR mode during preview only case
3571          * - For first request of every batch in HFR mode during video
3572          * recording. In batchmode the same settings except frame number is
3573          * repeated in each request of the batch.
3574          */
3575         if (!mBatchSize ||
3576            (mBatchSize && !isVidBufRequested) ||
3577            (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3578             rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
3579             if (rc < 0) {
3580                 ALOGE("%s: fail to set frame parameters", __func__);
3581                 pthread_mutex_unlock(&mMutex);
3582                 return rc;
3583             }
3584         }
3585         /* For batchMode HFR, setFrameParameters is not called for every
3586          * request. But only frame number of the latest request is parsed.
3587          * Keep track of first and last frame numbers in a batch so that
3588          * metadata for the frame numbers of batch can be duplicated in
3589          * handleBatchMetadta */
3590         if (mBatchSize) {
3591             if (!mToBeQueuedVidBufs) {
3592                 //start of the batch
3593                 mFirstFrameNumberInBatch = request->frame_number;
3594             }
3595             if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3596                 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3597                 ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3598                 return BAD_VALUE;
3599             }
3600         }
3601         if (mNeedSensorRestart) {
3602             /* Unlock the mutex as restartSensor waits on the channels to be
3603              * stopped, which in turn calls stream callback functions -
3604              * handleBufferWithLock and handleMetadataWithLock */
3605             pthread_mutex_unlock(&mMutex);
3606             rc = dynamicUpdateMetaStreamInfo();
3607             if (rc != NO_ERROR) {
3608                 ALOGE("%s: Restarting the sensor failed", __func__);
3609                 return BAD_VALUE;
3610             }
3611             mNeedSensorRestart = false;
3612             pthread_mutex_lock(&mMutex);
3613         }
3614     } else {
3615 
3616         if (request->input_buffer->acquire_fence != -1) {
3617            rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3618            close(request->input_buffer->acquire_fence);
3619            if (rc != OK) {
3620               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
3621               pthread_mutex_unlock(&mMutex);
3622               return rc;
3623            }
3624         }
3625     }
3626 
3627     if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
3628         mLastCustIntentFrmNum = frameNumber;
3629     }
3630     /* Update pending request list and pending buffers map */
3631     PendingRequestInfo pendingRequest;
3632     pendingRequestIterator latestRequest;
3633     pendingRequest.frame_number = frameNumber;
3634     pendingRequest.num_buffers = request->num_output_buffers;
3635     pendingRequest.request_id = request_id;
3636     pendingRequest.blob_request = blob_request;
3637     pendingRequest.timestamp = 0;
3638     pendingRequest.bUrgentReceived = 0;
3639     if (request->input_buffer) {
3640         pendingRequest.input_buffer =
3641                 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
3642         *(pendingRequest.input_buffer) = *(request->input_buffer);
3643         pInputBuffer = pendingRequest.input_buffer;
3644     } else {
3645        pendingRequest.input_buffer = NULL;
3646        pInputBuffer = NULL;
3647     }
3648 
3649     pendingRequest.pipeline_depth = 0;
3650     pendingRequest.partial_result_cnt = 0;
3651     extractJpegMetadata(mCurJpegMeta, request);
3652     pendingRequest.jpegMetadata = mCurJpegMeta;
3653     pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
3654     pendingRequest.shutter_notified = false;
3655     pendingRequest.need_dynamic_blklvl = false;
3656     pendingRequest.pending_extra_result = false;
3657 
3658     //extract capture intent
3659     if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3660         mCaptureIntent =
3661                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3662     }
3663     if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
3664         mHybridAeEnable =
3665                 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
3666     }
3667     pendingRequest.capture_intent = mCaptureIntent;
3668     pendingRequest.hybrid_ae_enable = mHybridAeEnable;
3669 
3670     for (size_t i = 0; i < request->num_output_buffers; i++) {
3671         RequestedBufferInfo requestedBuf;
3672         memset(&requestedBuf, 0, sizeof(requestedBuf));
3673         requestedBuf.stream = request->output_buffers[i].stream;
3674         requestedBuf.buffer = NULL;
3675         pendingRequest.buffers.push_back(requestedBuf);
3676 
3677         // Add to buffer handle the pending buffers list
3678         PendingBufferInfo bufferInfo;
3679         bufferInfo.frame_number = frameNumber;
3680         bufferInfo.buffer = request->output_buffers[i].buffer;
3681         bufferInfo.stream = request->output_buffers[i].stream;
3682         mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
3683         mPendingBuffersMap.num_buffers++;
3684         QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
3685         CDBG("%s: frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
3686                 __func__, frameNumber, bufferInfo.buffer,
3687                 channel->getStreamTypeMask(), bufferInfo.stream->format);
3688 
3689         if (bufferInfo.stream->format == HAL_PIXEL_FORMAT_RAW16) {
3690             if (gCamCapability[mCameraId]->optical_black_region_count > 0) {
3691                 CDBG("%s, frame_number:%d, need dynamic blacklevel", __func__, frameNumber);
3692                 pendingRequest.need_dynamic_blklvl = true;
3693             }
3694         }
3695     }
3696     mPendingBuffersMap.last_frame_number = frameNumber;
3697     latestRequest = mPendingRequestsList.insert(
3698             mPendingRequestsList.end(), pendingRequest);
3699     if(mFlush) {
3700         pthread_mutex_unlock(&mMutex);
3701         return NO_ERROR;
3702     }
3703 
3704     // Notify metadata channel we receive a request
3705     mMetadataChannel->request(NULL, frameNumber);
3706 
3707     if(request->input_buffer != NULL){
3708         CDBG("%s: Input request, frame_number %d", __func__, frameNumber);
3709         rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
3710         if (NO_ERROR != rc) {
3711             ALOGE("%s: fail to set reproc parameters", __func__);
3712             pthread_mutex_unlock(&mMutex);
3713             return rc;
3714         }
3715     }
3716 
3717     // Call request on other streams
3718     uint32_t streams_need_metadata = 0;
3719     pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
3720     for (size_t i = 0; i < request->num_output_buffers; i++) {
3721         const camera3_stream_buffer_t& output = request->output_buffers[i];
3722         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3723 
3724         if (channel == NULL) {
3725             ALOGE("%s: invalid channel pointer for stream", __func__);
3726             continue;
3727         }
3728 
3729         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3730             if(request->input_buffer != NULL){
3731                 rc = channel->request(output.buffer, frameNumber,
3732                         pInputBuffer, &mReprocMeta);
3733                 if (rc < 0) {
3734                     ALOGE("%s: Fail to request on picture channel", __func__);
3735                     pthread_mutex_unlock(&mMutex);
3736                     return rc;
3737                 }
3738             } else {
3739                 CDBG("%s: %d, snapshot request with buffer %p, frame_number %d", __func__,
3740                         __LINE__, output.buffer, frameNumber);
3741                 if (!request->settings) {
3742                     rc = channel->request(output.buffer, frameNumber,
3743                             NULL, mPrevParameters);
3744                 } else {
3745                     rc = channel->request(output.buffer, frameNumber,
3746                             NULL, mParameters);
3747                 }
3748                 if (rc < 0) {
3749                     ALOGE("%s: Fail to request on picture channel", __func__);
3750                     pthread_mutex_unlock(&mMutex);
3751                     return rc;
3752                 }
3753                 pendingBufferIter->need_metadata = true;
3754                 streams_need_metadata++;
3755             }
3756         } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
3757             bool needMetadata = false;
3758             QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
3759             rc = yuvChannel->request(output.buffer, frameNumber,
3760                     pInputBuffer,
3761                     (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
3762             if (rc < 0) {
3763                 ALOGE("%s: Fail to request on YUV channel", __func__);
3764                 pthread_mutex_unlock(&mMutex);
3765                 return rc;
3766             }
3767             pendingBufferIter->need_metadata = needMetadata;
3768             if (needMetadata)
3769                 streams_need_metadata += 1;
3770             CDBG("%s: calling YUV channel request, need_metadata is %d",
3771                     __func__, needMetadata);
3772         } else {
3773             CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
3774                 __LINE__, output.buffer, frameNumber);
3775             rc = channel->request(output.buffer, frameNumber);
3776             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3777                     && mBatchSize) {
3778                 mToBeQueuedVidBufs++;
3779                 if (mToBeQueuedVidBufs == mBatchSize) {
3780                     channel->queueBatchBuf();
3781                 }
3782             }
3783             if (rc < 0) {
3784                 ALOGE("%s: request failed", __func__);
3785                 pthread_mutex_unlock(&mMutex);
3786                 return rc;
3787             }
3788         }
3789         pendingBufferIter++;
3790     }
3791 
3792     //If 2 streams have need_metadata set to true, fail the request, unless
3793     //we copy/reference count the metadata buffer
3794     if (streams_need_metadata > 1) {
3795         ALOGE("%s: not supporting request in which two streams requires"
3796                 " 2 HAL metadata for reprocessing", __func__);
3797         pthread_mutex_unlock(&mMutex);
3798         return -EINVAL;
3799     }
3800 
3801     if(request->input_buffer == NULL) {
3802         /* Set the parameters to backend:
3803          * - For every request in NORMAL MODE
3804          * - For every request in HFR mode during preview only case
3805          * - Once every batch in HFR mode during video recording
3806          */
3807         if (!mBatchSize ||
3808            (mBatchSize && !isVidBufRequested) ||
3809            (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
3810             CDBG("%s: set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
3811                     __func__, mBatchSize, isVidBufRequested,
3812                     mToBeQueuedVidBufs);
3813             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3814                     mParameters);
3815             if (rc < 0) {
3816                 ALOGE("%s: set_parms failed", __func__);
3817             }
3818             /* reset to zero coz, the batch is queued */
3819             mToBeQueuedVidBufs = 0;
3820             mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
3821         }
3822         mPendingLiveRequest++;
3823     }
3824 
3825     CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
3826 
3827     mFirstRequest = false;
3828     // Added a timed condition wait
3829     struct timespec ts;
3830     uint8_t isValidTimeout = 1;
3831     rc = clock_gettime(CLOCK_MONOTONIC, &ts);
3832     if (rc < 0) {
3833       isValidTimeout = 0;
3834       ALOGE("%s: Error reading the real time clock!!", __func__);
3835     }
3836     else {
3837       // Make timeout as 5 sec for request to be honored
3838       ts.tv_sec += 5;
3839     }
3840     //Block on conditional variable
3841     if (mBatchSize) {
3842         /* For HFR, more buffers are dequeued upfront to improve the performance */
3843         minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3844         maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3845     }
3846     while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer) {
3847         if (!isValidTimeout) {
3848             CDBG("%s: Blocking on conditional wait", __func__);
3849             pthread_cond_wait(&mRequestCond, &mMutex);
3850         }
3851         else {
3852             CDBG("%s: Blocking on timed conditional wait", __func__);
3853             rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
3854             if (rc == ETIMEDOUT) {
3855                 rc = -ENODEV;
3856                 ALOGE("%s: Unblocked on timeout!!!!", __func__);
3857                 break;
3858             }
3859         }
3860         CDBG("%s: Unblocked", __func__);
3861         if (mWokenUpByDaemon) {
3862             mWokenUpByDaemon = false;
3863             if (mPendingLiveRequest < maxInFlightRequests)
3864                 break;
3865         }
3866     }
3867     pthread_mutex_unlock(&mMutex);
3868 
3869     return rc;
3870 }
3871 
3872 /*===========================================================================
3873  * FUNCTION   : dump
3874  *
3875  * DESCRIPTION:
3876  *
3877  * PARAMETERS :
3878  *
3879  *
3880  * RETURN     :
3881  *==========================================================================*/
dump(int fd)3882 void QCamera3HardwareInterface::dump(int fd)
3883 {
3884     pthread_mutex_lock(&mMutex);
3885     dprintf(fd, "\n Camera HAL3 information Begin \n");
3886 
3887     dprintf(fd, "\nNumber of pending requests: %zu \n",
3888         mPendingRequestsList.size());
3889     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3890     dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
3891     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3892     for(pendingRequestIterator i = mPendingRequestsList.begin();
3893             i != mPendingRequestsList.end(); i++) {
3894         dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
3895         i->frame_number, i->num_buffers, i->request_id, i->blob_request,
3896         i->input_buffer);
3897     }
3898     dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
3899                 mPendingBuffersMap.num_buffers);
3900     dprintf(fd, "-------+------------------\n");
3901     dprintf(fd, " Frame | Stream type mask \n");
3902     dprintf(fd, "-------+------------------\n");
3903     for(List<PendingBufferInfo>::iterator i =
3904         mPendingBuffersMap.mPendingBufferList.begin();
3905         i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
3906         QCamera3Channel *channel = (QCamera3Channel *)(i->stream->priv);
3907         dprintf(fd, " %5d | %11d \n",
3908                 i->frame_number, channel->getStreamTypeMask());
3909     }
3910     dprintf(fd, "-------+------------------\n");
3911 
3912     dprintf(fd, "\nPending frame drop list: %zu\n",
3913         mPendingFrameDropList.size());
3914     dprintf(fd, "-------+-----------\n");
3915     dprintf(fd, " Frame | Stream ID \n");
3916     dprintf(fd, "-------+-----------\n");
3917     for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
3918         i != mPendingFrameDropList.end(); i++) {
3919         dprintf(fd, " %5d | %9d \n",
3920             i->frame_number, i->stream_ID);
3921     }
3922     dprintf(fd, "-------+-----------\n");
3923 
3924     dprintf(fd, "\n Camera HAL3 information End \n");
3925 
3926     /* use dumpsys media.camera as trigger to send update debug level event */
3927     mUpdateDebugLevel = true;
3928     pthread_mutex_unlock(&mMutex);
3929     return;
3930 }
3931 
3932 /*===========================================================================
3933  * FUNCTION   : flush
3934  *
3935  * DESCRIPTION:
3936  *
3937  * PARAMETERS :
3938  *
3939  *
3940  * RETURN     :
3941  *==========================================================================*/
flush()3942 int QCamera3HardwareInterface::flush()
3943 {
3944     ATRACE_CALL();
3945     int32_t rc = NO_ERROR;
3946 
3947     CDBG("%s: Unblocking Process Capture Request", __func__);
3948     pthread_mutex_lock(&mMutex);
3949 
3950     if (mFirstRequest) {
3951         pthread_mutex_unlock(&mMutex);
3952         return NO_ERROR;
3953     }
3954 
3955     mFlush = true;
3956     pthread_mutex_unlock(&mMutex);
3957 
3958     rc = stopAllChannels();
3959     if (rc < 0) {
3960         ALOGE("%s: stopAllChannels failed", __func__);
3961         return rc;
3962     }
3963     if (mChannelHandle) {
3964         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
3965                 mChannelHandle);
3966     }
3967 
3968     // Reset bundle info
3969     rc = setBundleInfo();
3970     if (rc < 0) {
3971         ALOGE("%s: setBundleInfo failed %d", __func__, rc);
3972         return rc;
3973     }
3974 
3975     // Mutex Lock
3976     pthread_mutex_lock(&mMutex);
3977 
3978     // Unblock process_capture_request
3979     mPendingLiveRequest = 0;
3980     pthread_cond_signal(&mRequestCond);
3981 
3982     rc = notifyErrorForPendingRequests();
3983     if (rc < 0) {
3984         ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
3985         pthread_mutex_unlock(&mMutex);
3986         return rc;
3987     }
3988 
3989     mFlush = false;
3990 
3991     // Start the Streams/Channels
3992     rc = startAllChannels();
3993     if (rc < 0) {
3994         ALOGE("%s: startAllChannels failed", __func__);
3995         pthread_mutex_unlock(&mMutex);
3996         return rc;
3997     }
3998 
3999     if (mChannelHandle) {
4000         mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4001                     mChannelHandle);
4002         if (rc < 0) {
4003             ALOGE("%s: start_channel failed", __func__);
4004             pthread_mutex_unlock(&mMutex);
4005             return rc;
4006         }
4007     }
4008 
4009     pthread_mutex_unlock(&mMutex);
4010 
4011     return 0;
4012 }
4013 
4014 /*===========================================================================
4015  * FUNCTION   : captureResultCb
4016  *
4017  * DESCRIPTION: Callback handler for all capture result
4018  *              (streams, as well as metadata)
4019  *
4020  * PARAMETERS :
4021  *   @metadata : metadata information
4022  *   @buffer   : actual gralloc buffer to be returned to frameworks.
4023  *               NULL if metadata.
4024  *
4025  * RETURN     : NONE
4026  *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata_buf,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer)4027 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4028                 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4029 {
4030     if (metadata_buf) {
4031         if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
4032             handleBatchMetadata(metadata_buf,
4033                     true /* free_and_bufdone_meta_buf */);
4034         } else { /* mBatchSize = 0 */
4035             hdrPlusPerfLock(metadata_buf);
4036             pthread_mutex_lock(&mMutex);
4037             handleMetadataWithLock(metadata_buf,
4038                     true /* free_and_bufdone_meta_buf */,
4039                     false /* first frame of batch metadata */ );
4040             pthread_mutex_unlock(&mMutex);
4041         }
4042     } else if (isInputBuffer) {
4043         pthread_mutex_lock(&mMutex);
4044         handleInputBufferWithLock(frame_number);
4045         pthread_mutex_unlock(&mMutex);
4046     } else {
4047         pthread_mutex_lock(&mMutex);
4048         handleBufferWithLock(buffer, frame_number);
4049         pthread_mutex_unlock(&mMutex);
4050     }
4051     return;
4052 }
4053 
4054 /*===========================================================================
4055  * FUNCTION   : getReprocessibleOutputStreamId
4056  *
4057  * DESCRIPTION: Get source output stream id for the input reprocess stream
4058  *              based on size and format, which would be the largest
4059  *              output stream if an input stream exists.
4060  *
4061  * PARAMETERS :
4062  *   @id      : return the stream id if found
4063  *
4064  * RETURN     : int32_t type of status
4065  *              NO_ERROR  -- success
4066  *              none-zero failure code
4067  *==========================================================================*/
getReprocessibleOutputStreamId(uint32_t & id)4068 int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4069 {
4070     stream_info_t* stream = NULL;
4071 
4072     /* check if any output or bidirectional stream with the same size and format
4073        and return that stream */
4074     if ((mInputStreamInfo.dim.width > 0) &&
4075             (mInputStreamInfo.dim.height > 0)) {
4076         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4077                 it != mStreamInfo.end(); it++) {
4078 
4079             camera3_stream_t *stream = (*it)->stream;
4080             if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4081                     (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4082                     (stream->format == mInputStreamInfo.format)) {
4083                 // Usage flag for an input stream and the source output stream
4084                 // may be different.
4085                 CDBG("%s: Found reprocessible output stream! %p", __func__, *it);
4086                 CDBG("%s: input stream usage 0x%x, current stream usage 0x%x",
4087                         __func__, stream->usage, mInputStreamInfo.usage);
4088 
4089                 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4090                 if (channel != NULL && channel->mStreams[0]) {
4091                     id = channel->mStreams[0]->getMyServerID();
4092                     return NO_ERROR;
4093                 }
4094             }
4095         }
4096     } else {
4097         CDBG("%s: No input stream, so no reprocessible output stream", __func__);
4098     }
4099     return NAME_NOT_FOUND;
4100 }
4101 
4102 /*===========================================================================
4103  * FUNCTION   : lookupFwkName
4104  *
4105  * DESCRIPTION: In case the enum is not same in fwk and backend
4106  *              make sure the parameter is correctly propogated
4107  *
4108  * PARAMETERS  :
4109  *   @arr      : map between the two enums
4110  *   @len      : len of the map
4111  *   @hal_name : name of the hal_parm to map
4112  *
4113  * RETURN     : int type of status
4114  *              fwk_name  -- success
4115  *              none-zero failure code
4116  *==========================================================================*/
lookupFwkName(const mapType * arr,size_t len,halType hal_name)4117 template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4118         size_t len, halType hal_name)
4119 {
4120 
4121     for (size_t i = 0; i < len; i++) {
4122         if (arr[i].hal_name == hal_name) {
4123             return arr[i].fwk_name;
4124         }
4125     }
4126 
4127     /* Not able to find matching framework type is not necessarily
4128      * an error case. This happens when mm-camera supports more attributes
4129      * than the frameworks do */
4130     CDBG_HIGH("%s: Cannot find matching framework type", __func__);
4131     return NAME_NOT_FOUND;
4132 }
4133 
4134 /*===========================================================================
4135  * FUNCTION   : lookupHalName
4136  *
4137  * DESCRIPTION: In case the enum is not same in fwk and backend
4138  *              make sure the parameter is correctly propogated
4139  *
4140  * PARAMETERS  :
4141  *   @arr      : map between the two enums
4142  *   @len      : len of the map
4143  *   @fwk_name : name of the hal_parm to map
4144  *
4145  * RETURN     : int32_t type of status
4146  *              hal_name  -- success
4147  *              none-zero failure code
4148  *==========================================================================*/
lookupHalName(const mapType * arr,size_t len,fwkType fwk_name)4149 template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
4150         size_t len, fwkType fwk_name)
4151 {
4152     for (size_t i = 0; i < len; i++) {
4153         if (arr[i].fwk_name == fwk_name) {
4154             return arr[i].hal_name;
4155         }
4156     }
4157 
4158     ALOGE("%s: Cannot find matching hal type fwk_name=%d", __func__, fwk_name);
4159     return NAME_NOT_FOUND;
4160 }
4161 
4162 /*===========================================================================
4163  * FUNCTION   : lookupProp
4164  *
4165  * DESCRIPTION: lookup a value by its name
4166  *
4167  * PARAMETERS :
4168  *   @arr     : map between the two enums
4169  *   @len     : size of the map
4170  *   @name    : name to be looked up
4171  *
4172  * RETURN     : Value if found
4173  *              CAM_CDS_MODE_MAX if not found
4174  *==========================================================================*/
lookupProp(const mapType * arr,size_t len,const char * name)4175 template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4176         size_t len, const char *name)
4177 {
4178     if (name) {
4179         for (size_t i = 0; i < len; i++) {
4180             if (!strcmp(arr[i].desc, name)) {
4181                 return arr[i].val;
4182             }
4183         }
4184     }
4185     return CAM_CDS_MODE_MAX;
4186 }
4187 
/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translates metadata received from the HAL backend into the
 *              framework's camera_metadata_t representation
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *   @timestamp: metadata buffer timestamp
 *   @request_id: request id
 *   @jpegMetadata: additional jpeg metadata
 *   @pipeline_depth: pipeline depth of this request
 *   @capture_intent: capture intent of this request
 *   @hybrid_ae_enable: whether hybrid ae is enabled
 *   @pprocDone: whether internal offline postprocessing is done
 *   @dynamic_blklvl: whether the dynamic black level is calculated from the
 *              raw callback (true) or taken from the linearization LUT (false)
 *   @firstMetadataInBatch: whether this is the first metadata buffer in a
 *              batch
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk
 *==========================================================================*/
4203 camera_metadata_t*
translateFromHalMetadata(metadata_buffer_t * metadata,nsecs_t timestamp,int32_t request_id,const CameraMetadata & jpegMetadata,uint8_t pipeline_depth,uint8_t capture_intent,uint8_t hybrid_ae_enable,bool pprocDone,bool dynamic_blklvl,bool firstMetadataInBatch)4204 QCamera3HardwareInterface::translateFromHalMetadata(
4205                                  metadata_buffer_t *metadata,
4206                                  nsecs_t timestamp,
4207                                  int32_t request_id,
4208                                  const CameraMetadata& jpegMetadata,
4209                                  uint8_t pipeline_depth,
4210                                  uint8_t capture_intent,
4211                                  uint8_t hybrid_ae_enable,
4212                                  bool pprocDone,
4213                                  bool dynamic_blklvl,
4214                                  bool firstMetadataInBatch)
4215 {
4216     CameraMetadata camMetadata;
4217     camera_metadata_t *resultMetadata;
4218 
4219     if (mBatchSize && !firstMetadataInBatch) {
4220         /* In batch mode, use cached metadata from the first metadata
4221             in the batch */
4222         camMetadata.clear();
4223         camMetadata = mCachedMetadata;
4224     }
4225 
4226     if (jpegMetadata.entryCount())
4227         camMetadata.append(jpegMetadata);
4228 
4229     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
4230     camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4231     camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4232     camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4233     camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
4234 
4235     if (mBatchSize && !firstMetadataInBatch) {
4236         /* In batch mode, use cached metadata instead of parsing metadata buffer again */
4237         resultMetadata = camMetadata.release();
4238         return resultMetadata;
4239     }
4240 
4241     IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
4242         int64_t fwk_frame_number = *frame_number;
4243         camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
4244     }
4245 
4246     IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
4247         int32_t fps_range[2];
4248         fps_range[0] = (int32_t)float_range->min_fps;
4249         fps_range[1] = (int32_t)float_range->max_fps;
4250         camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
4251                                       fps_range, 2);
4252         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
4253             __func__, fps_range[0], fps_range[1]);
4254     }
4255 
4256     IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
4257         camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
4258     }
4259 
4260     IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4261         int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
4262                 METADATA_MAP_SIZE(SCENE_MODES_MAP),
4263                 *sceneMode);
4264         if (NAME_NOT_FOUND != val) {
4265             uint8_t fwkSceneMode = (uint8_t)val;
4266             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
4267             CDBG("%s: urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
4268                     __func__, fwkSceneMode);
4269         }
4270     }
4271 
4272     IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
4273         uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
4274         camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
4275     }
4276 
4277     IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
4278         uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
4279         camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
4280     }
4281 
4282     IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
4283         uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
4284         camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
4285     }
4286 
4287     IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
4288             CAM_INTF_META_EDGE_MODE, metadata) {
4289         uint8_t edgeStrength = (uint8_t) edgeApplication->sharpness;
4290         camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
4291     }
4292 
4293     IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
4294         uint8_t fwk_flashPower = (uint8_t) *flashPower;
4295         camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
4296     }
4297 
4298     IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
4299         camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
4300     }
4301 
4302     IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
4303         if (0 <= *flashState) {
4304             uint8_t fwk_flashState = (uint8_t) *flashState;
4305             if (!gCamCapability[mCameraId]->flash_available) {
4306                 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
4307             }
4308             camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
4309         }
4310     }
4311 
4312     IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
4313         int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
4314         if (NAME_NOT_FOUND != val) {
4315             uint8_t fwk_flashMode = (uint8_t)val;
4316             camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
4317         }
4318     }
4319 
4320     IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
4321         uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
4322         camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
4323     }
4324 
4325     IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
4326         camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
4327     }
4328 
4329     IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
4330         camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
4331     }
4332 
4333     IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
4334         camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
4335     }
4336 
4337     IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
4338         uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
4339         camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
4340     }
4341 
4342     IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
4343         uint8_t fwk_videoStab = (uint8_t) *videoStab;
4344         camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
4345     }
4346 
4347     IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
4348         uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
4349         camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
4350     }
4351 
4352     IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
4353         camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
4354     }
4355 
4356     IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
4357         CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
4358 
4359         CDBG("%s: dynamicblackLevel = %f %f %f %f", __func__,
4360           blackLevelSourcePattern->cam_black_level[0],
4361           blackLevelSourcePattern->cam_black_level[1],
4362           blackLevelSourcePattern->cam_black_level[2],
4363           blackLevelSourcePattern->cam_black_level[3]);
4364     }
4365 
4366     IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
4367         CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
4368         float fwk_blackLevelInd[4];
4369 
4370         fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
4371         fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
4372         fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
4373         fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
4374 
4375         CDBG("%s: applied dynamicblackLevel = %f %f %f %f", __func__,
4376           blackLevelAppliedPattern->cam_black_level[0],
4377           blackLevelAppliedPattern->cam_black_level[1],
4378           blackLevelAppliedPattern->cam_black_level[2],
4379           blackLevelAppliedPattern->cam_black_level[3]);
4380         camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
4381         camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4382 
4383         // if dynmaic_blklvl is true, we calculate blklvl from raw callback
4384         // otherwise, use the value from linearization LUT.
4385         if (dynamic_blklvl == false) {
4386             // Need convert the internal 16 bit depth to sensor 10 bit sensor raw
4387             // depth space.
4388             fwk_blackLevelInd[0] /= 64.0;
4389             fwk_blackLevelInd[1] /= 64.0;
4390             fwk_blackLevelInd[2] /= 64.0;
4391             fwk_blackLevelInd[3] /= 64.0;
4392             camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4393         }
4394     }
4395 
4396     // Fixed whitelevel is used by ISP/Sensor
4397     camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
4398             &gCamCapability[mCameraId]->white_level, 1);
4399 
4400     if (gCamCapability[mCameraId]->optical_black_region_count != 0 &&
4401         gCamCapability[mCameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
4402         int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
4403         for (size_t i = 0; i < gCamCapability[mCameraId]->optical_black_region_count * 4; i++) {
4404             opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
4405         }
4406         camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_INFO_OPTICALLY_SHIELDED_REGIONS,
4407                 opticalBlackRegions, gCamCapability[mCameraId]->optical_black_region_count * 4);
4408     }
4409 
4410     IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
4411             CAM_INTF_META_SCALER_CROP_REGION, metadata) {
4412         int32_t scalerCropRegion[4];
4413         scalerCropRegion[0] = hScalerCropRegion->left;
4414         scalerCropRegion[1] = hScalerCropRegion->top;
4415         scalerCropRegion[2] = hScalerCropRegion->width;
4416         scalerCropRegion[3] = hScalerCropRegion->height;
4417 
4418         // Adjust crop region from sensor output coordinate system to active
4419         // array coordinate system.
4420         mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
4421                 scalerCropRegion[2], scalerCropRegion[3]);
4422 
4423         camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
4424     }
4425 
4426     IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
4427         CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
4428         camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
4429     }
4430 
4431     IF_META_AVAILABLE(int64_t, sensorFameDuration,
4432             CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
4433         CDBG("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
4434         camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
4435     }
4436 
4437     IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
4438             CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
4439         CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
4440         camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
4441                 sensorRollingShutterSkew, 1);
4442     }
4443 
4444     IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
4445         CDBG("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
4446         camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
4447 
4448         //calculate the noise profile based on sensitivity
4449         double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
4450         double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
4451         double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
4452         for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
4453             noise_profile[i]   = noise_profile_S;
4454             noise_profile[i+1] = noise_profile_O;
4455         }
4456         CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
4457                 noise_profile_S, noise_profile_O);
4458         camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
4459                 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
4460     }
4461 
4462     IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
4463         uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
4464         camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
4465     }
4466 
4467     IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
4468         int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
4469                 *faceDetectMode);
4470         if (NAME_NOT_FOUND != val) {
4471             uint8_t fwk_faceDetectMode = (uint8_t)val;
4472             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
4473 
4474             if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4475                 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
4476                         CAM_INTF_META_FACE_DETECTION, metadata) {
4477                     uint8_t numFaces = MIN(
4478                             faceDetectionInfo->num_faces_detected, MAX_ROI);
4479                     int32_t faceIds[MAX_ROI];
4480                     uint8_t faceScores[MAX_ROI];
4481                     int32_t faceRectangles[MAX_ROI * 4];
4482                     int32_t faceLandmarks[MAX_ROI * 6];
4483                     size_t j = 0, k = 0;
4484 
4485                     for (size_t i = 0; i < numFaces; i++) {
4486                         faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
4487                         // Adjust crop region from sensor output coordinate system to active
4488                         // array coordinate system.
4489                         cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
4490                         mCropRegionMapper.toActiveArray(rect.left, rect.top,
4491                                 rect.width, rect.height);
4492 
4493                         convertToRegions(faceDetectionInfo->faces[i].face_boundary,
4494                                 faceRectangles+j, -1);
4495 
4496                         // Map the co-ordinate sensor output coordinate system to active
4497                         // array coordinate system.
4498                         cam_face_detection_info_t& face = faceDetectionInfo->faces[i];
4499                         mCropRegionMapper.toActiveArray(face.left_eye_center.x,
4500                                 face.left_eye_center.y);
4501                         mCropRegionMapper.toActiveArray(face.right_eye_center.x,
4502                                 face.right_eye_center.y);
4503                         mCropRegionMapper.toActiveArray(face.mouth_center.x,
4504                                 face.mouth_center.y);
4505 
4506                         convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
4507                         j+= 4;
4508                         k+= 6;
4509                     }
4510                     if (numFaces <= 0) {
4511                         memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
4512                         memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
4513                         memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
4514                         memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
4515                     }
4516 
4517                     camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
4518                             numFaces);
4519                     camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
4520                             faceRectangles, numFaces * 4U);
4521                     if (fwk_faceDetectMode ==
4522                             ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
4523                         camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
4524                         camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
4525                                 faceLandmarks, numFaces * 6U);
4526                    }
4527                 }
4528             }
4529         }
4530     }
4531 
4532     IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
4533         uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
4534         camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
4535     }
4536 
4537     IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
4538             CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
4539         uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
4540         camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
4541     }
4542 
4543     IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
4544             CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
4545         camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
4546                 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
4547     }
4548 
4549     IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
4550             CAM_INTF_META_LENS_SHADING_MAP, metadata) {
4551         size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
4552                 CAM_MAX_SHADING_MAP_HEIGHT);
4553         size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
4554                 CAM_MAX_SHADING_MAP_WIDTH);
4555         camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
4556                 lensShadingMap->lens_shading, 4U * map_width * map_height);
4557     }
4558 
4559     IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
4560         uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
4561         camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
4562     }
4563 
4564     IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
4565         //Populate CAM_INTF_META_TONEMAP_CURVES
4566         /* ch0 = G, ch 1 = B, ch 2 = R*/
4567         if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4568             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4569                     __func__, tonemap->tonemap_points_cnt,
4570                     CAM_MAX_TONEMAP_CURVE_SIZE);
4571             tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4572         }
4573 
4574         camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
4575                         &tonemap->curves[0].tonemap_points[0][0],
4576                         tonemap->tonemap_points_cnt * 2);
4577 
4578         camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
4579                         &tonemap->curves[1].tonemap_points[0][0],
4580                         tonemap->tonemap_points_cnt * 2);
4581 
4582         camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
4583                         &tonemap->curves[2].tonemap_points[0][0],
4584                         tonemap->tonemap_points_cnt * 2);
4585     }
4586 
4587     IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
4588             CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
4589         camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
4590                 CC_GAINS_COUNT);
4591     }
4592 
4593     IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
4594             CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
4595         camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
4596                 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
4597                 CC_MATRIX_COLS * CC_MATRIX_ROWS);
4598     }
4599 
4600     IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
4601             CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
4602         if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4603             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4604                     __func__, toneCurve->tonemap_points_cnt,
4605                     CAM_MAX_TONEMAP_CURVE_SIZE);
4606             toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4607         }
4608         camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
4609                 (float*)toneCurve->curve.tonemap_points,
4610                 toneCurve->tonemap_points_cnt * 2);
4611     }
4612 
4613     IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
4614             CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
4615         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
4616                 predColorCorrectionGains->gains, 4);
4617     }
4618 
4619     IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
4620             CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
4621         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4622                 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
4623                 CC_MATRIX_ROWS * CC_MATRIX_COLS);
4624     }
4625 
4626     IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
4627         camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
4628     }
4629 
4630     IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
4631         uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
4632         camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
4633     }
4634 
4635     IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
4636         uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
4637         camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
4638     }
4639 
4640     IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
4641         int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
4642                 *effectMode);
4643         if (NAME_NOT_FOUND != val) {
4644             uint8_t fwk_effectMode = (uint8_t)val;
4645             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
4646         }
4647     }
4648 
4649     IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
4650             CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
4651         int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
4652                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
4653         if (NAME_NOT_FOUND != fwk_testPatternMode) {
4654             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
4655         }
4656         int32_t fwk_testPatternData[4];
4657         fwk_testPatternData[0] = testPatternData->r;
4658         fwk_testPatternData[3] = testPatternData->b;
4659         switch (gCamCapability[mCameraId]->color_arrangement) {
4660         case CAM_FILTER_ARRANGEMENT_RGGB:
4661         case CAM_FILTER_ARRANGEMENT_GRBG:
4662             fwk_testPatternData[1] = testPatternData->gr;
4663             fwk_testPatternData[2] = testPatternData->gb;
4664             break;
4665         case CAM_FILTER_ARRANGEMENT_GBRG:
4666         case CAM_FILTER_ARRANGEMENT_BGGR:
4667             fwk_testPatternData[2] = testPatternData->gr;
4668             fwk_testPatternData[1] = testPatternData->gb;
4669             break;
4670         default:
4671             ALOGE("%s: color arrangement %d is not supported", __func__,
4672                 gCamCapability[mCameraId]->color_arrangement);
4673             break;
4674         }
4675         camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
4676     }
4677 
4678     IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
4679         camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
4680     }
4681 
4682     IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
4683         String8 str((const char *)gps_methods);
4684         camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
4685     }
4686 
4687     IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
4688         camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
4689     }
4690 
4691     IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
4692         camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
4693     }
4694 
4695     IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
4696         uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
4697         camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
4698     }
4699 
4700     IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
4701         uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
4702         camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
4703     }
4704 
4705     IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
4706         int32_t fwk_thumb_size[2];
4707         fwk_thumb_size[0] = thumb_size->width;
4708         fwk_thumb_size[1] = thumb_size->height;
4709         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
4710     }
4711 
4712     IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
4713         camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
4714                 privateData,
4715                 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
4716     }
4717 
4718     if (metadata->is_tuning_params_valid) {
4719         uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
4720         uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
4721         metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
4722 
4723 
4724         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
4725                 sizeof(uint32_t));
4726         data += sizeof(uint32_t);
4727 
4728         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
4729                 sizeof(uint32_t));
4730         CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
4731         data += sizeof(uint32_t);
4732 
4733         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
4734                 sizeof(uint32_t));
4735         CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
4736         data += sizeof(uint32_t);
4737 
4738         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
4739                 sizeof(uint32_t));
4740         CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
4741         data += sizeof(uint32_t);
4742 
4743         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
4744                 sizeof(uint32_t));
4745         CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
4746         data += sizeof(uint32_t);
4747 
4748         metadata->tuning_params.tuning_mod3_data_size = 0;
4749         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
4750                 sizeof(uint32_t));
4751         CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
4752         data += sizeof(uint32_t);
4753 
4754         size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
4755                 TUNING_SENSOR_DATA_MAX);
4756         memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
4757                 count);
4758         data += count;
4759 
4760         count = MIN(metadata->tuning_params.tuning_vfe_data_size,
4761                 TUNING_VFE_DATA_MAX);
4762         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
4763                 count);
4764         data += count;
4765 
4766         count = MIN(metadata->tuning_params.tuning_cpp_data_size,
4767                 TUNING_CPP_DATA_MAX);
4768         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
4769                 count);
4770         data += count;
4771 
4772         count = MIN(metadata->tuning_params.tuning_cac_data_size,
4773                 TUNING_CAC_DATA_MAX);
4774         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
4775                 count);
4776         data += count;
4777 
4778         camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
4779                 (int32_t *)(void *)tuning_meta_data_blob,
4780                 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
4781     }
4782 
4783     IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
4784             CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
4785         camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
4786                 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
4787                 NEUTRAL_COL_POINTS);
4788     }
4789 
4790     IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
4791         uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
4792         camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
4793     }
4794 
4795     IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
4796         int32_t aeRegions[REGIONS_TUPLE_COUNT];
4797         // Adjust crop region from sensor output coordinate system to active
4798         // array coordinate system.
4799         mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
4800                 hAeRegions->rect.width, hAeRegions->rect.height);
4801 
4802         convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
4803         camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
4804                 REGIONS_TUPLE_COUNT);
4805         CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4806                 __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
4807                 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
4808                 hAeRegions->rect.height);
4809     }
4810 
4811     IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
4812         int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
4813         if (NAME_NOT_FOUND != val) {
4814             uint8_t fwkAfMode = (uint8_t)val;
4815             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
4816             CDBG("%s: Metadata : ANDROID_CONTROL_AF_MODE %d", __func__, val);
4817         } else {
4818             CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_AF_MODE %d",
4819                     __func__, val);
4820         }
4821     }
4822 
4823     IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
4824         uint8_t fwk_afState = (uint8_t) *afState;
4825         camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
4826         CDBG("%s: Metadata : ANDROID_CONTROL_AF_STATE %u", __func__, *afState);
4827     }
4828 
4829     IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
4830         camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
4831     }
4832 
4833     IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
4834         camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
4835     }
4836 
4837     IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
4838         uint8_t fwk_lensState = *lensState;
4839         camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
4840     }
4841 
4842     IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
4843         /*af regions*/
4844         int32_t afRegions[REGIONS_TUPLE_COUNT];
4845         // Adjust crop region from sensor output coordinate system to active
4846         // array coordinate system.
4847         mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
4848                 hAfRegions->rect.width, hAfRegions->rect.height);
4849 
4850         convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
4851         camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
4852                 REGIONS_TUPLE_COUNT);
4853         CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4854                 __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
4855                 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
4856                 hAfRegions->rect.height);
4857     }
4858 
4859     IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
4860         int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
4861                 *hal_ab_mode);
4862         if (NAME_NOT_FOUND != val) {
4863             uint8_t fwk_ab_mode = (uint8_t)val;
4864             camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
4865         }
4866     }
4867 
4868     IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4869         int val = lookupFwkName(SCENE_MODES_MAP,
4870                 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
4871         if (NAME_NOT_FOUND != val) {
4872             uint8_t fwkBestshotMode = (uint8_t)val;
4873             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
4874             CDBG("%s: Metadata : ANDROID_CONTROL_SCENE_MODE", __func__);
4875         } else {
4876             CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_SCENE_MODE", __func__);
4877         }
4878     }
4879 
4880     IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
4881          uint8_t fwk_mode = (uint8_t) *mode;
4882          camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
4883     }
4884 
4885     /* Constant metadata values to be update*/
4886     uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
4887     camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
4888 
4889     uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4890     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4891 
4892     int32_t hotPixelMap[2];
4893     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
4894 
4895     // CDS
4896     IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
4897         camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
4898     }
4899 
4900     // TNR
4901     IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
4902         uint8_t tnr_enable       = tnr->denoise_enable;
4903         int32_t tnr_process_type = (int32_t)tnr->process_plates;
4904 
4905         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
4906         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
4907     }
4908 
4909     // Reprocess crop data
4910     IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
4911         uint8_t cnt = crop_data->num_of_streams;
4912         if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
4913             // mm-qcamera-daemon only posts crop_data for streams
4914             // not linked to pproc. So no valid crop metadata is not
4915             // necessarily an error case.
4916             CDBG("%s: No valid crop metadata entries", __func__);
4917         } else {
4918             uint32_t reproc_stream_id;
4919             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
4920                 CDBG("%s: No reprocessible stream found, ignore crop data", __func__);
4921             } else {
4922                 int rc = NO_ERROR;
4923                 Vector<int32_t> roi_map;
4924                 int32_t *crop = new int32_t[cnt*4];
4925                 if (NULL == crop) {
4926                    rc = NO_MEMORY;
4927                 }
4928                 if (NO_ERROR == rc) {
4929                     int32_t streams_found = 0;
4930                     for (size_t i = 0; i < cnt; i++) {
4931                         if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
4932                             if (pprocDone) {
4933                                 // HAL already does internal reprocessing,
4934                                 // either via reprocessing before JPEG encoding,
4935                                 // or offline postprocessing for pproc bypass case.
4936                                 crop[0] = 0;
4937                                 crop[1] = 0;
4938                                 crop[2] = mInputStreamInfo.dim.width;
4939                                 crop[3] = mInputStreamInfo.dim.height;
4940                             } else {
4941                                 crop[0] = crop_data->crop_info[i].crop.left;
4942                                 crop[1] = crop_data->crop_info[i].crop.top;
4943                                 crop[2] = crop_data->crop_info[i].crop.width;
4944                                 crop[3] = crop_data->crop_info[i].crop.height;
4945                             }
4946                             roi_map.add(crop_data->crop_info[i].roi_map.left);
4947                             roi_map.add(crop_data->crop_info[i].roi_map.top);
4948                             roi_map.add(crop_data->crop_info[i].roi_map.width);
4949                             roi_map.add(crop_data->crop_info[i].roi_map.height);
4950                             streams_found++;
4951                             CDBG("%s: Adding reprocess crop data for stream %dx%d, %dx%d",
4952                                     __func__,
4953                                     crop[0], crop[1], crop[2], crop[3]);
4954                             CDBG("%s: Adding reprocess crop roi map for stream %dx%d, %dx%d",
4955                                     __func__,
4956                                     crop_data->crop_info[i].roi_map.left,
4957                                     crop_data->crop_info[i].roi_map.top,
4958                                     crop_data->crop_info[i].roi_map.width,
4959                                     crop_data->crop_info[i].roi_map.height);
4960                             break;
4961 
4962                        }
4963                     }
4964                     camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
4965                             &streams_found, 1);
4966                     camMetadata.update(QCAMERA3_CROP_REPROCESS,
4967                             crop, (size_t)(streams_found * 4));
4968                     if (roi_map.array()) {
4969                         camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
4970                                 roi_map.array(), roi_map.size());
4971                     }
4972                }
4973                if (crop) {
4974                    delete [] crop;
4975                }
4976             }
4977         }
4978     }
4979 
4980     IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
4981         int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
4982                 *cacMode);
4983         if (NAME_NOT_FOUND != val) {
4984             uint8_t fwkCacMode = (uint8_t)val;
4985             camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
4986         } else {
4987             ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode);
4988         }
4989     }
4990 
4991     // Post blob of cam_cds_data through vendor tag.
4992     IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
4993         uint8_t cnt = cdsInfo->num_of_streams;
4994         cam_cds_data_t cdsDataOverride;
4995         memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
4996         cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
4997         cdsDataOverride.num_of_streams = 1;
4998         if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
4999             uint32_t reproc_stream_id;
5000             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5001                 CDBG("%s: No reprocessible stream found, ignore cds data", __func__);
5002             } else {
5003                 for (size_t i = 0; i < cnt; i++) {
5004                     if (cdsInfo->cds_info[i].stream_id ==
5005                             reproc_stream_id) {
5006                         cdsDataOverride.cds_info[0].cds_enable =
5007                                 cdsInfo->cds_info[i].cds_enable;
5008                         break;
5009                     }
5010                 }
5011             }
5012         } else {
5013             CDBG("%s: Invalid stream count %d in CDS_DATA", __func__, cnt);
5014         }
5015         camMetadata.update(QCAMERA3_CDS_INFO,
5016                 (uint8_t *)&cdsDataOverride,
5017                 sizeof(cam_cds_data_t));
5018     }
5019 
5020     // Ldaf calibration data
5021     if (!mLdafCalibExist) {
5022         IF_META_AVAILABLE(uint32_t, ldafCalib,
5023                 CAM_INTF_META_LDAF_EXIF, metadata) {
5024             mLdafCalibExist = true;
5025             mLdafCalib[0] = ldafCalib[0];
5026             mLdafCalib[1] = ldafCalib[1];
5027             CDBG("%s: ldafCalib[0] is %d, ldafCalib[1] is %d", __func__,
5028                     ldafCalib[0], ldafCalib[1]);
5029         }
5030     }
5031 
5032     // Post Raw Sensitivity Boost = ISP digital gain
5033     IF_META_AVAILABLE(float, ispDigitalGain, CAM_INTF_META_ISP_DIGITAL_GAIN, metadata) {
5034         int32_t postRawSensitivity = static_cast<int32_t>(*ispDigitalGain * 100);
5035         camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &postRawSensitivity, 1);
5036     }
5037 
5038     /* In batch mode, cache the first metadata in the batch */
5039     if (mBatchSize && firstMetadataInBatch) {
5040         mCachedMetadata.clear();
5041         mCachedMetadata = camMetadata;
5042     }
5043 
5044     resultMetadata = camMetadata.release();
5045     return resultMetadata;
5046 }
5047 
5048 /*===========================================================================
5049  * FUNCTION   : saveExifParams
5050  *
 * DESCRIPTION: Cache the 3A/stats EXIF debug parameters delivered with a
 *              metadata callback into mExifParams for later JPEG encoding.
5052  *
5053  * PARAMETERS :
5054  *   @metadata : metadata information from callback
5055  *
5056  * RETURN     : none
5057  *
5058  *==========================================================================*/
saveExifParams(metadata_buffer_t * metadata)5059 void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
5060 {
5061     IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
5062             CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
5063         mExifParams.ae_debug_params = *ae_exif_debug_params;
5064         mExifParams.ae_debug_params_valid = TRUE;
5065     }
5066     IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
5067             CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
5068         mExifParams.awb_debug_params = *awb_exif_debug_params;
5069         mExifParams.awb_debug_params_valid = TRUE;
5070     }
5071     IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
5072             CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
5073         mExifParams.af_debug_params = *af_exif_debug_params;
5074         mExifParams.af_debug_params_valid = TRUE;
5075     }
5076     IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
5077             CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
5078         mExifParams.asd_debug_params = *asd_exif_debug_params;
5079         mExifParams.asd_debug_params_valid = TRUE;
5080     }
5081     IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
5082             CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
5083         mExifParams.stats_debug_params = *stats_exif_debug_params;
5084         mExifParams.stats_debug_params_valid = TRUE;
5085     }
5086 }
5087 
5088 /*===========================================================================
5089  * FUNCTION   : get3AExifParams
5090  *
 * DESCRIPTION: Return the 3A EXIF parameters previously cached by
 *              saveExifParams().
5092  *
5093  * PARAMETERS : none
5094  *
5095  *
5096  * RETURN     : mm_jpeg_exif_params_t
5097  *
5098  *==========================================================================*/
get3AExifParams()5099 mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
5100 {
5101     return mExifParams;
5102 }
5103 
5104 /*===========================================================================
5105  * FUNCTION   : translateCbUrgentMetadataToResultMetadata
5106  *
 * DESCRIPTION: Translate the urgent (early partial result) subset of HAL
 *              metadata — 3A states, triggers and AE/AWB modes — into
 *              framework result metadata.
5108  *
5109  * PARAMETERS :
5110  *   @metadata : metadata information from callback
5111  *
5112  * RETURN     : camera_metadata_t*
5113  *              metadata in a format specified by fwk
5114  *==========================================================================*/
5115 camera_metadata_t*
translateCbUrgentMetadataToResultMetadata(metadata_buffer_t * metadata)5116 QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
5117                                 (metadata_buffer_t *metadata)
5118 {
5119     CameraMetadata camMetadata;
5120     camera_metadata_t *resultMetadata;
5121 
5122 
5123     IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
5124         uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
5125         camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
5126         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", __func__, *whiteBalanceState);
5127     }
5128 
5129     IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
5130         camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
5131                 &aecTrigger->trigger, 1);
5132         camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
5133                 &aecTrigger->trigger_id, 1);
5134         CDBG("%s: urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
5135                 __func__, aecTrigger->trigger);
5136         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d", __func__,
5137                 aecTrigger->trigger_id);
5138     }
5139 
5140     IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
5141         uint8_t fwk_ae_state = (uint8_t) *ae_state;
5142         camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
5143         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE %u", __func__, *ae_state);
5144     }
5145 
5146     IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
5147         camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
5148                 &af_trigger->trigger, 1);
5149         CDBG("%s: urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
5150                 __func__, af_trigger->trigger);
5151         camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
5152         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d", __func__,
5153                 af_trigger->trigger_id);
5154     }
5155 
5156     IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
5157         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
5158                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
5159         if (NAME_NOT_FOUND != val) {
5160             uint8_t fwkWhiteBalanceMode = (uint8_t)val;
5161             camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
5162             CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", __func__, val);
5163         } else {
5164             CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AWB_MODE", __func__);
5165         }
5166     }
5167 
5168     uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
5169     uint32_t aeMode = CAM_AE_MODE_MAX;
5170     int32_t flashMode = CAM_FLASH_MODE_MAX;
5171     int32_t redeye = -1;
5172     IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
5173         aeMode = *pAeMode;
5174     }
5175     IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
5176         flashMode = *pFlashMode;
5177     }
5178     IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
5179         redeye = *pRedeye;
5180     }
5181 
5182     if (1 == redeye) {
5183         fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
5184         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
5185     } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
5186         int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
5187                 flashMode);
5188         if (NAME_NOT_FOUND != val) {
5189             fwk_aeMode = (uint8_t)val;
5190             camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
5191         } else {
5192             ALOGE("%s: Unsupported flash mode %d", __func__, flashMode);
5193         }
5194     } else if (aeMode == CAM_AE_MODE_ON) {
5195         fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
5196         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
5197     } else if (aeMode == CAM_AE_MODE_OFF) {
5198         fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
5199         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
5200     } else {
5201         ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
5202               "flashMode:%d, aeMode:%u!!!",
5203                 __func__, redeye, flashMode, aeMode);
5204     }
5205 
5206     resultMetadata = camMetadata.release();
5207     return resultMetadata;
5208 }
5209 
5210 /*===========================================================================
5211  * FUNCTION   : dumpMetadataToFile
5212  *
5213  * DESCRIPTION: Dumps tuning metadata to file system
5214  *
5215  * PARAMETERS :
5216  *   @meta           : tuning metadata
5217  *   @dumpFrameCount : current dump frame count
5218  *   @enabled        : Enable mask
5219  *
5220  *==========================================================================*/
dumpMetadataToFile(tuning_params_t & meta,uint32_t & dumpFrameCount,bool enabled,const char * type,uint32_t frameNumber)5221 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
5222                                                    uint32_t &dumpFrameCount,
5223                                                    bool enabled,
5224                                                    const char *type,
5225                                                    uint32_t frameNumber)
5226 {
5227     uint32_t frm_num = 0;
5228 
5229     //Some sanity checks
5230     if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
5231         ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
5232               __func__,
5233               meta.tuning_sensor_data_size,
5234               TUNING_SENSOR_DATA_MAX);
5235         return;
5236     }
5237 
5238     if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
5239         ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
5240               __func__,
5241               meta.tuning_vfe_data_size,
5242               TUNING_VFE_DATA_MAX);
5243         return;
5244     }
5245 
5246     if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
5247         ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
5248               __func__,
5249               meta.tuning_cpp_data_size,
5250               TUNING_CPP_DATA_MAX);
5251         return;
5252     }
5253 
5254     if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
5255         ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
5256               __func__,
5257               meta.tuning_cac_data_size,
5258               TUNING_CAC_DATA_MAX);
5259         return;
5260     }
5261     //
5262 
5263     if(enabled){
5264         char timeBuf[FILENAME_MAX];
5265         char buf[FILENAME_MAX];
5266         memset(buf, 0, sizeof(buf));
5267         memset(timeBuf, 0, sizeof(timeBuf));
5268         time_t current_time;
5269         struct tm * timeinfo;
5270         time (&current_time);
5271         timeinfo = localtime (&current_time);
5272         if (timeinfo != NULL) {
5273             strftime (timeBuf, sizeof(timeBuf),
5274                     QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
5275         }
5276         String8 filePath(timeBuf);
5277         snprintf(buf,
5278                 sizeof(buf),
5279                 "%dm_%s_%d.bin",
5280                 dumpFrameCount,
5281                 type,
5282                 frameNumber);
5283         filePath.append(buf);
5284         int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
5285         if (file_fd >= 0) {
5286             ssize_t written_len = 0;
5287             meta.tuning_data_version = TUNING_DATA_VERSION;
5288             void *data = (void *)((uint8_t *)&meta.tuning_data_version);
5289             written_len += write(file_fd, data, sizeof(uint32_t));
5290             data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
5291             CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
5292             written_len += write(file_fd, data, sizeof(uint32_t));
5293             data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
5294             CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
5295             written_len += write(file_fd, data, sizeof(uint32_t));
5296             data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
5297             CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
5298             written_len += write(file_fd, data, sizeof(uint32_t));
5299             data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
5300             CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
5301             written_len += write(file_fd, data, sizeof(uint32_t));
5302             meta.tuning_mod3_data_size = 0;
5303             data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
5304             CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
5305             written_len += write(file_fd, data, sizeof(uint32_t));
5306             size_t total_size = meta.tuning_sensor_data_size;
5307             data = (void *)((uint8_t *)&meta.data);
5308             written_len += write(file_fd, data, total_size);
5309             total_size = meta.tuning_vfe_data_size;
5310             data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
5311             written_len += write(file_fd, data, total_size);
5312             total_size = meta.tuning_cpp_data_size;
5313             data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
5314             written_len += write(file_fd, data, total_size);
5315             total_size = meta.tuning_cac_data_size;
5316             data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
5317             written_len += write(file_fd, data, total_size);
5318             close(file_fd);
5319         }else {
5320             ALOGE("%s: fail to open file for metadata dumping", __func__);
5321         }
5322     }
5323 }
5324 
5325 /*===========================================================================
5326  * FUNCTION   : cleanAndSortStreamInfo
5327  *
5328  * DESCRIPTION: helper method to clean up invalid streams in stream_info,
5329  *              and sort them such that raw stream is at the end of the list
5330  *              This is a workaround for camera daemon constraint.
5331  *
5332  * PARAMETERS : None
5333  *
5334  *==========================================================================*/
cleanAndSortStreamInfo()5335 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
5336 {
5337     List<stream_info_t *> newStreamInfo;
5338 
5339     /*clean up invalid streams*/
5340     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
5341             it != mStreamInfo.end();) {
5342         if(((*it)->status) == INVALID){
5343             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
5344             delete channel;
5345             free(*it);
5346             it = mStreamInfo.erase(it);
5347         } else {
5348             it++;
5349         }
5350     }
5351 
5352     // Move preview/video/callback/snapshot streams into newList
5353     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5354             it != mStreamInfo.end();) {
5355         if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
5356                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
5357                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
5358             newStreamInfo.push_back(*it);
5359             it = mStreamInfo.erase(it);
5360         } else
5361             it++;
5362     }
5363     // Move raw streams into newList
5364     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5365             it != mStreamInfo.end();) {
5366         newStreamInfo.push_back(*it);
5367         it = mStreamInfo.erase(it);
5368     }
5369 
5370     mStreamInfo = newStreamInfo;
5371 }
5372 
5373 /*===========================================================================
5374  * FUNCTION   : extractJpegMetadata
5375  *
5376  * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
5377  *              JPEG metadata is cached in HAL, and return as part of capture
5378  *              result when metadata is returned from camera daemon.
5379  *
5380  * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
5381  *              @request:      capture request
5382  *
5383  *==========================================================================*/
extractJpegMetadata(CameraMetadata & jpegMetadata,const camera3_capture_request_t * request)5384 void QCamera3HardwareInterface::extractJpegMetadata(
5385         CameraMetadata& jpegMetadata,
5386         const camera3_capture_request_t *request)
5387 {
5388     CameraMetadata frame_settings;
5389     frame_settings = request->settings;
5390 
5391     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
5392         jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
5393                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
5394                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
5395 
5396     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
5397         jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
5398                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
5399                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
5400 
5401     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
5402         jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
5403                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
5404                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
5405 
5406     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
5407         jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
5408                 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
5409                 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
5410 
5411     if (frame_settings.exists(ANDROID_JPEG_QUALITY))
5412         jpegMetadata.update(ANDROID_JPEG_QUALITY,
5413                 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
5414                 frame_settings.find(ANDROID_JPEG_QUALITY).count);
5415 
5416     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
5417         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
5418                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
5419                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
5420 
5421     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
5422         int32_t thumbnail_size[2];
5423         thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
5424         thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
5425         if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
5426             int32_t orientation =
5427                   frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
5428             if ((orientation == 90) || (orientation == 270)) {
5429                //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
5430                int32_t temp;
5431                temp = thumbnail_size[0];
5432                thumbnail_size[0] = thumbnail_size[1];
5433                thumbnail_size[1] = temp;
5434             }
5435          }
5436          jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
5437                 thumbnail_size,
5438                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
5439     }
5440 
5441 }
5442 
5443 /*===========================================================================
5444  * FUNCTION   : convertToRegions
5445  *
5446  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
5447  *
5448  * PARAMETERS :
5449  *   @rect   : cam_rect_t struct to convert
5450  *   @region : int32_t destination array
5451  *   @weight : if we are converting from cam_area_t, weight is valid
5452  *             else weight = -1
5453  *
5454  *==========================================================================*/
convertToRegions(cam_rect_t rect,int32_t * region,int weight)5455 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
5456         int32_t *region, int weight)
5457 {
5458     region[0] = rect.left;
5459     region[1] = rect.top;
5460     region[2] = rect.left + rect.width;
5461     region[3] = rect.top + rect.height;
5462     if (weight > -1) {
5463         region[4] = weight;
5464     }
5465 }
5466 
5467 /*===========================================================================
5468  * FUNCTION   : convertFromRegions
5469  *
5470  * DESCRIPTION: helper method to convert from array to cam_rect_t
5471  *
5472  * PARAMETERS :
5473  *   @rect   : cam_rect_t struct to convert
5474  *   @region : int32_t destination array
5475  *   @weight : if we are converting from cam_area_t, weight is valid
5476  *             else weight = -1
5477  *
5478  *==========================================================================*/
convertFromRegions(cam_area_t & roi,const camera_metadata_t * settings,uint32_t tag)5479 void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
5480         const camera_metadata_t *settings, uint32_t tag)
5481 {
5482     CameraMetadata frame_settings;
5483     frame_settings = settings;
5484     int32_t x_min = frame_settings.find(tag).data.i32[0];
5485     int32_t y_min = frame_settings.find(tag).data.i32[1];
5486     int32_t x_max = frame_settings.find(tag).data.i32[2];
5487     int32_t y_max = frame_settings.find(tag).data.i32[3];
5488     roi.weight = frame_settings.find(tag).data.i32[4];
5489     roi.rect.left = x_min;
5490     roi.rect.top = y_min;
5491     roi.rect.width = x_max - x_min;
5492     roi.rect.height = y_max - y_min;
5493 }
5494 
5495 /*===========================================================================
5496  * FUNCTION   : resetIfNeededROI
5497  *
5498  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
5499  *              crop region
5500  *
5501  * PARAMETERS :
5502  *   @roi       : cam_area_t struct to resize
5503  *   @scalerCropRegion : cam_crop_region_t region to compare against
5504  *
5505  *
5506  *==========================================================================*/
resetIfNeededROI(cam_area_t * roi,const cam_crop_region_t * scalerCropRegion)5507 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
5508                                                  const cam_crop_region_t* scalerCropRegion)
5509 {
5510     int32_t roi_x_max = roi->rect.width + roi->rect.left;
5511     int32_t roi_y_max = roi->rect.height + roi->rect.top;
5512     int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
5513     int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
5514 
5515     /* According to spec weight = 0 is used to indicate roi needs to be disabled
5516      * without having this check the calculations below to validate if the roi
5517      * is inside scalar crop region will fail resulting in the roi not being
5518      * reset causing algorithm to continue to use stale roi window
5519      */
5520     if (roi->weight == 0) {
5521         return true;
5522     }
5523 
5524     if ((roi_x_max < scalerCropRegion->left) ||
5525         // right edge of roi window is left of scalar crop's left edge
5526         (roi_y_max < scalerCropRegion->top)  ||
5527         // bottom edge of roi window is above scalar crop's top edge
5528         (roi->rect.left > crop_x_max) ||
5529         // left edge of roi window is beyond(right) of scalar crop's right edge
5530         (roi->rect.top > crop_y_max)){
5531         // top edge of roi windo is above scalar crop's top edge
5532         return false;
5533     }
5534     if (roi->rect.left < scalerCropRegion->left) {
5535         roi->rect.left = scalerCropRegion->left;
5536     }
5537     if (roi->rect.top < scalerCropRegion->top) {
5538         roi->rect.top = scalerCropRegion->top;
5539     }
5540     if (roi_x_max > crop_x_max) {
5541         roi_x_max = crop_x_max;
5542     }
5543     if (roi_y_max > crop_y_max) {
5544         roi_y_max = crop_y_max;
5545     }
5546     roi->rect.width = roi_x_max - roi->rect.left;
5547     roi->rect.height = roi_y_max - roi->rect.top;
5548     return true;
5549 }
5550 
5551 /*===========================================================================
5552  * FUNCTION   : convertLandmarks
5553  *
5554  * DESCRIPTION: helper method to extract the landmarks from face detection info
5555  *
5556  * PARAMETERS :
5557  *   @face   : cam_rect_t struct to convert
5558  *   @landmarks : int32_t destination array
5559  *
5560  *
5561  *==========================================================================*/
convertLandmarks(cam_face_detection_info_t face,int32_t * landmarks)5562 void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t *landmarks)
5563 {
5564     landmarks[0] = (int32_t)face.left_eye_center.x;
5565     landmarks[1] = (int32_t)face.left_eye_center.y;
5566     landmarks[2] = (int32_t)face.right_eye_center.x;
5567     landmarks[3] = (int32_t)face.right_eye_center.y;
5568     landmarks[4] = (int32_t)face.mouth_center.x;
5569     landmarks[5] = (int32_t)face.mouth_center.y;
5570 }
5571 
5572 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
5573 /*===========================================================================
5574  * FUNCTION   : initCapabilities
5575  *
5576  * DESCRIPTION: initialize camera capabilities in static data struct
5577  *
5578  * PARAMETERS :
5579  *   @cameraId  : camera Id
5580  *
5581  * RETURN     : int32_t type of status
5582  *              NO_ERROR  -- success
5583  *              none-zero failure code
5584  *==========================================================================*/
initCapabilities(uint32_t cameraId)5585 int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
5586 {
5587     int rc = 0;
5588     mm_camera_vtbl_t *cameraHandle = NULL;
5589     QCamera3HeapMemory *capabilityHeap = NULL;
5590 
5591     rc = camera_open((uint8_t)cameraId, &cameraHandle);
5592     if (rc || !cameraHandle) {
5593         ALOGE("%s: camera_open failed. rc = %d, cameraHandle = %p", __func__, rc, cameraHandle);
5594         goto open_failed;
5595     }
5596 
5597     capabilityHeap = new QCamera3HeapMemory(1);
5598     if (capabilityHeap == NULL) {
5599         ALOGE("%s: creation of capabilityHeap failed", __func__);
5600         goto heap_creation_failed;
5601     }
5602     /* Allocate memory for capability buffer */
5603     rc = capabilityHeap->allocate(sizeof(cam_capability_t));
5604     if(rc != OK) {
5605         ALOGE("%s: No memory for cappability", __func__);
5606         goto allocate_failed;
5607     }
5608 
5609     /* Map memory for capability buffer */
5610     memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
5611     rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
5612                                 CAM_MAPPING_BUF_TYPE_CAPABILITY,
5613                                 capabilityHeap->getFd(0),
5614                                 sizeof(cam_capability_t));
5615     if(rc < 0) {
5616         ALOGE("%s: failed to map capability buffer", __func__);
5617         goto map_failed;
5618     }
5619 
5620     /* Query Capability */
5621     rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
5622     if(rc < 0) {
5623         ALOGE("%s: failed to query capability",__func__);
5624         goto query_failed;
5625     }
5626     gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
5627     if (!gCamCapability[cameraId]) {
5628         ALOGE("%s: out of memory", __func__);
5629         goto query_failed;
5630     }
5631     memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
5632                                         sizeof(cam_capability_t));
5633     rc = 0;
5634 
5635 query_failed:
5636     cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
5637                             CAM_MAPPING_BUF_TYPE_CAPABILITY);
5638 map_failed:
5639     capabilityHeap->deallocate();
5640 allocate_failed:
5641     delete capabilityHeap;
5642 heap_creation_failed:
5643     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
5644     cameraHandle = NULL;
5645 open_failed:
5646     return rc;
5647 }
5648 
5649 /*==========================================================================
5650  * FUNCTION   : get3Aversion
5651  *
5652  * DESCRIPTION: get the Q3A S/W version
5653  *
5654  * PARAMETERS :
5655  *  @sw_version: Reference of Q3A structure which will hold version info upon
5656  *               return
5657  *
5658  * RETURN     : None
5659  *
5660  *==========================================================================*/
get3AVersion(cam_q3a_version_t & sw_version)5661 void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
5662 {
5663     if(gCamCapability[mCameraId])
5664         sw_version = gCamCapability[mCameraId]->q3a_version;
5665     else
5666         ALOGE("%s:Capability structure NULL!", __func__);
5667 }
5668 
5669 
5670 /*===========================================================================
5671  * FUNCTION   : initParameters
5672  *
5673  * DESCRIPTION: initialize camera parameters
5674  *
5675  * PARAMETERS :
5676  *
5677  * RETURN     : int32_t type of status
5678  *              NO_ERROR  -- success
5679  *              none-zero failure code
5680  *==========================================================================*/
initParameters()5681 int QCamera3HardwareInterface::initParameters()
5682 {
5683     int rc = 0;
5684 
5685     //Allocate Set Param Buffer
5686     mParamHeap = new QCamera3HeapMemory(1);
5687     rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
5688     if(rc != OK) {
5689         rc = NO_MEMORY;
5690         ALOGE("Failed to allocate SETPARM Heap memory");
5691         delete mParamHeap;
5692         mParamHeap = NULL;
5693         return rc;
5694     }
5695 
5696     //Map memory for parameters buffer
5697     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
5698             CAM_MAPPING_BUF_TYPE_PARM_BUF,
5699             mParamHeap->getFd(0),
5700             sizeof(metadata_buffer_t));
5701     if(rc < 0) {
5702         ALOGE("%s:failed to map SETPARM buffer",__func__);
5703         rc = FAILED_TRANSACTION;
5704         mParamHeap->deallocate();
5705         delete mParamHeap;
5706         mParamHeap = NULL;
5707         return rc;
5708     }
5709 
5710     mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
5711 
5712     mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
5713     return rc;
5714 }
5715 
5716 /*===========================================================================
5717  * FUNCTION   : deinitParameters
5718  *
5719  * DESCRIPTION: de-initialize camera parameters
5720  *
5721  * PARAMETERS :
5722  *
5723  * RETURN     : NONE
5724  *==========================================================================*/
deinitParameters()5725 void QCamera3HardwareInterface::deinitParameters()
5726 {
5727     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
5728             CAM_MAPPING_BUF_TYPE_PARM_BUF);
5729 
5730     mParamHeap->deallocate();
5731     delete mParamHeap;
5732     mParamHeap = NULL;
5733 
5734     mParameters = NULL;
5735 
5736     free(mPrevParameters);
5737     mPrevParameters = NULL;
5738 }
5739 
5740 /*===========================================================================
5741  * FUNCTION   : calcMaxJpegSize
5742  *
5743  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
5744  *
5745  * PARAMETERS :
5746  *
5747  * RETURN     : max_jpeg_size
5748  *==========================================================================*/
calcMaxJpegSize(uint32_t camera_id)5749 size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
5750 {
5751     size_t max_jpeg_size = 0;
5752     size_t temp_width, temp_height;
5753     size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
5754             MAX_SIZES_CNT);
5755     for (size_t i = 0; i < count; i++) {
5756         temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
5757         temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
5758         if (temp_width * temp_height > max_jpeg_size ) {
5759             max_jpeg_size = temp_width * temp_height;
5760         }
5761     }
5762     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
5763     return max_jpeg_size;
5764 }
5765 
5766 /*===========================================================================
5767  * FUNCTION   : getMaxRawSize
5768  *
5769  * DESCRIPTION: Fetches maximum raw size supported by the cameraId
5770  *
5771  * PARAMETERS :
5772  *
5773  * RETURN     : Largest supported Raw Dimension
5774  *==========================================================================*/
getMaxRawSize(uint32_t camera_id)5775 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
5776 {
5777     int max_width = 0;
5778     cam_dimension_t maxRawSize;
5779 
5780     memset(&maxRawSize, 0, sizeof(cam_dimension_t));
5781     for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
5782         if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
5783             max_width = gCamCapability[camera_id]->raw_dim[i].width;
5784             maxRawSize = gCamCapability[camera_id]->raw_dim[i];
5785         }
5786     }
5787     return maxRawSize;
5788 }
5789 
5790 
5791 /*===========================================================================
5792  * FUNCTION   : calcMaxJpegDim
5793  *
5794  * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
5795  *
5796  * PARAMETERS :
5797  *
5798  * RETURN     : max_jpeg_dim
5799  *==========================================================================*/
calcMaxJpegDim()5800 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
5801 {
5802     cam_dimension_t max_jpeg_dim;
5803     cam_dimension_t curr_jpeg_dim;
5804     max_jpeg_dim.width = 0;
5805     max_jpeg_dim.height = 0;
5806     curr_jpeg_dim.width = 0;
5807     curr_jpeg_dim.height = 0;
5808     for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
5809         curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
5810         curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
5811         if (curr_jpeg_dim.width * curr_jpeg_dim.height >
5812             max_jpeg_dim.width * max_jpeg_dim.height ) {
5813             max_jpeg_dim.width = curr_jpeg_dim.width;
5814             max_jpeg_dim.height = curr_jpeg_dim.height;
5815         }
5816     }
5817     return max_jpeg_dim;
5818 }
5819 
5820 /*===========================================================================
5821  * FUNCTION   : addStreamConfig
5822  *
5823  * DESCRIPTION: adds the stream configuration to the array
5824  *
5825  * PARAMETERS :
5826  * @available_stream_configs : pointer to stream configuration array
5827  * @scalar_format            : scalar format
5828  * @dim                      : configuration dimension
5829  * @config_type              : input or output configuration type
5830  *
5831  * RETURN     : NONE
5832  *==========================================================================*/
addStreamConfig(Vector<int32_t> & available_stream_configs,int32_t scalar_format,const cam_dimension_t & dim,int32_t config_type)5833 void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
5834         int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
5835 {
5836     available_stream_configs.add(scalar_format);
5837     available_stream_configs.add(dim.width);
5838     available_stream_configs.add(dim.height);
5839     available_stream_configs.add(config_type);
5840 }
5841 
5842 
5843 /*===========================================================================
5844  * FUNCTION   : initStaticMetadata
5845  *
5846  * DESCRIPTION: initialize the static metadata
5847  *
5848  * PARAMETERS :
5849  *   @cameraId  : camera Id
5850  *
5851  * RETURN     : int32_t type of status
5852  *              0  -- success
5853  *              non-zero failure code
5854  *==========================================================================*/
initStaticMetadata(uint32_t cameraId)5855 int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
5856 {
5857     int rc = 0;
5858     CameraMetadata staticInfo;
5859     size_t count = 0;
5860     bool limitedDevice = false;
5861     char prop[PROPERTY_VALUE_MAX];
5862 
5863     /* If sensor is YUV sensor (no raw support) or if per-frame control is not
5864      * guaranteed, its advertised as limited device */
5865     limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
5866             (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type);
5867 
5868     uint8_t supportedHwLvl = limitedDevice ?
5869             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
5870             // No capability check done here to distinguish LEVEL_FULL from
5871             // LEVEL_3 - assuming this HAL will not run on devices that only
5872             // meet FULL spec
5873             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
5874 
5875     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
5876             &supportedHwLvl, 1);
5877 
5878     bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
5879     /*HAL 3 only*/
5880     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
5881                     &gCamCapability[cameraId]->min_focus_distance, 1);
5882 
5883     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
5884                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
5885 
5886     /*should be using focal lengths but sensor doesn't provide that info now*/
5887     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
5888                       &gCamCapability[cameraId]->focal_length,
5889                       1);
5890 
5891     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
5892                       gCamCapability[cameraId]->apertures,
5893                       gCamCapability[cameraId]->apertures_count);
5894 
5895     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
5896                 gCamCapability[cameraId]->filter_densities,
5897                 gCamCapability[cameraId]->filter_densities_count);
5898 
5899 
5900     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
5901                       (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
5902                       gCamCapability[cameraId]->optical_stab_modes_count);
5903 
5904     int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
5905             gCamCapability[cameraId]->lens_shading_map_size.height};
5906     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
5907                       lens_shading_map_size,
5908                       sizeof(lens_shading_map_size)/sizeof(int32_t));
5909 
5910     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
5911             gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
5912 
5913     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
5914             gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
5915 
5916     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
5917             &gCamCapability[cameraId]->max_frame_duration, 1);
5918 
5919     camera_metadata_rational baseGainFactor = {
5920             gCamCapability[cameraId]->base_gain_factor.numerator,
5921             gCamCapability[cameraId]->base_gain_factor.denominator};
5922     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
5923                       &baseGainFactor, 1);
5924 
5925     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
5926                      (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
5927 
5928     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
5929             gCamCapability[cameraId]->pixel_array_size.height};
5930     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
5931                       pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
5932 
5933     int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
5934                                                 gCamCapability[cameraId]->active_array_size.top,
5935                                                 gCamCapability[cameraId]->active_array_size.width,
5936                                                 gCamCapability[cameraId]->active_array_size.height};
5937     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
5938                       active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
5939 
5940     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
5941             &gCamCapability[cameraId]->white_level, 1);
5942 
5943     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
5944             gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
5945 
5946     bool hasBlackRegions = false;
5947     if (gCamCapability[cameraId]->optical_black_region_count != 0 &&
5948             gCamCapability[cameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
5949         int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
5950         for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i+=4) {
5951             // Left
5952             opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
5953             //Top
5954             opticalBlackRegions[i + 1] = gCamCapability[cameraId]->optical_black_regions[i + 1];
5955             // Width
5956             opticalBlackRegions[i + 2] = gCamCapability[cameraId]->optical_black_regions[i + 2] -
5957                     gCamCapability[cameraId]->optical_black_regions[i];
5958             // Height
5959             opticalBlackRegions[i + 3] = gCamCapability[cameraId]->optical_black_regions[i + 3] -
5960                     gCamCapability[cameraId]->optical_black_regions[i + 1];
5961         }
5962         staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
5963                 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
5964         hasBlackRegions = true;
5965     }
5966 
5967     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
5968                       &gCamCapability[cameraId]->flash_charge_duration, 1);
5969 
5970     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
5971                       &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
5972 
5973     uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
5974             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
5975             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
5976     staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
5977             &timestampSource, 1);
5978 
5979     staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
5980                       &gCamCapability[cameraId]->histogram_size, 1);
5981 
5982     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
5983             &gCamCapability[cameraId]->max_histogram_count, 1);
5984 
5985     int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
5986             gCamCapability[cameraId]->sharpness_map_size.height};
5987 
5988     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
5989             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
5990 
5991     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
5992             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
5993 
5994     int32_t scalar_formats[] = {
5995             ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
5996             ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
5997             ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
5998             ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
5999             HAL_PIXEL_FORMAT_RAW10,
6000             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
6001     size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
6002     staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
6003                       scalar_formats,
6004                       scalar_formats_count);
6005 
6006     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
6007     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6008     makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
6009             count, MAX_SIZES_CNT, available_processed_sizes);
6010     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
6011             available_processed_sizes, count * 2);
6012 
6013     int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
6014     count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
6015     makeTable(gCamCapability[cameraId]->raw_dim,
6016             count, MAX_SIZES_CNT, available_raw_sizes);
6017     staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
6018             available_raw_sizes, count * 2);
6019 
6020     int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
6021     count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
6022     makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
6023             count, MAX_SIZES_CNT, available_fps_ranges);
6024     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6025             available_fps_ranges, count * 2);
6026 
6027     camera_metadata_rational exposureCompensationStep = {
6028             gCamCapability[cameraId]->exp_compensation_step.numerator,
6029             gCamCapability[cameraId]->exp_compensation_step.denominator};
6030     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
6031                       &exposureCompensationStep, 1);
6032 
6033     Vector<uint8_t> availableVstabModes;
6034     availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
6035     char eis_prop[PROPERTY_VALUE_MAX];
6036     memset(eis_prop, 0, sizeof(eis_prop));
6037     property_get("persist.camera.eis.enable", eis_prop, "0");
6038     uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
6039     if (facingBack && eis_prop_set) {
6040         availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
6041     }
6042     staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6043                       availableVstabModes.array(), availableVstabModes.size());
6044 
6045     /*HAL 1 and HAL 3 common*/
6046     float maxZoom = 4;
6047     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6048             &maxZoom, 1);
6049 
6050     uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
6051     staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
6052 
6053     int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
6054     if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
6055         max3aRegions[2] = 0; /* AF not supported */
6056     staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
6057             max3aRegions, 3);
6058 
6059     /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
6060     memset(prop, 0, sizeof(prop));
6061     property_get("persist.camera.facedetect", prop, "1");
6062     uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
6063     CDBG("%s: Support face detection mode: %d",
6064             __func__, supportedFaceDetectMode);
6065 
6066     int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
6067     Vector<uint8_t> availableFaceDetectModes;
6068     availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
6069     if (supportedFaceDetectMode == 1) {
6070         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6071     } else if (supportedFaceDetectMode == 2) {
6072         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6073     } else if (supportedFaceDetectMode == 3) {
6074         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6075         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6076     } else {
6077         maxFaces = 0;
6078     }
6079     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6080             availableFaceDetectModes.array(),
6081             availableFaceDetectModes.size());
6082     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
6083             (int32_t *)&maxFaces, 1);
6084 
6085     int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
6086                                            gCamCapability[cameraId]->exposure_compensation_max};
6087     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
6088             exposureCompensationRange,
6089             sizeof(exposureCompensationRange)/sizeof(int32_t));
6090 
6091     uint8_t lensFacing = (facingBack) ?
6092             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
6093     staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
6094 
6095     staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6096                       available_thumbnail_sizes,
6097                       sizeof(available_thumbnail_sizes)/sizeof(int32_t));
6098 
6099     /*all sizes will be clubbed into this tag*/
6100     int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
6101     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6102     size_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes,
6103             count * 2, MAX_SIZES_CNT * 2, gCamCapability[cameraId]->active_array_size,
6104             gCamCapability[cameraId]->max_downscale_factor);
6105     /*android.scaler.availableStreamConfigurations*/
6106     size_t max_stream_configs_size = count * scalar_formats_count * 4;
6107     Vector<int32_t> available_stream_configs;
6108     cam_dimension_t active_array_dim;
6109     active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
6110     active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
6111     /* Add input/output stream configurations for each scalar formats*/
6112     for (size_t j = 0; j < scalar_formats_count; j++) {
6113         switch (scalar_formats[j]) {
6114         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
6115         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6116         case HAL_PIXEL_FORMAT_RAW10:
6117             for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
6118                 addStreamConfig(available_stream_configs, scalar_formats[j],
6119                         gCamCapability[cameraId]->raw_dim[i],
6120                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6121             }
6122             break;
6123         case HAL_PIXEL_FORMAT_BLOB:
6124             cam_dimension_t jpeg_size;
6125             for (size_t i = 0; i < jpeg_sizes_cnt/2; i++) {
6126                 jpeg_size.width  = available_jpeg_sizes[i*2];
6127                 jpeg_size.height = available_jpeg_sizes[i*2+1];
6128                 addStreamConfig(available_stream_configs, scalar_formats[j],
6129                         jpeg_size,
6130                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6131             }
6132             break;
6133         case HAL_PIXEL_FORMAT_YCbCr_420_888:
6134         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
6135         default:
6136             cam_dimension_t largest_picture_size;
6137             memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
6138             for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
6139                 addStreamConfig(available_stream_configs, scalar_formats[j],
6140                         gCamCapability[cameraId]->picture_sizes_tbl[i],
6141                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6142                 /* Book keep largest */
6143                 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
6144                         >= largest_picture_size.width &&
6145                         gCamCapability[cameraId]->picture_sizes_tbl[i].height
6146                         >= largest_picture_size.height)
6147                     largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
6148             }
6149             /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
6150             if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
6151                     scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
6152                  addStreamConfig(available_stream_configs, scalar_formats[j],
6153                          largest_picture_size,
6154                          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
6155             }
6156             break;
6157         }
6158     }
6159 
6160     staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
6161                       available_stream_configs.array(), available_stream_configs.size());
6162     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
6163     staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
6164 
6165     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6166     staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6167 
6168     /* android.scaler.availableMinFrameDurations */
6169     int64_t available_min_durations[max_stream_configs_size];
6170     size_t idx = 0;
6171     for (size_t j = 0; j < scalar_formats_count; j++) {
6172         switch (scalar_formats[j]) {
6173         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
6174         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6175         case HAL_PIXEL_FORMAT_RAW10:
6176             for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
6177                 available_min_durations[idx] = scalar_formats[j];
6178                 available_min_durations[idx+1] =
6179                     gCamCapability[cameraId]->raw_dim[i].width;
6180                 available_min_durations[idx+2] =
6181                     gCamCapability[cameraId]->raw_dim[i].height;
6182                 available_min_durations[idx+3] =
6183                     gCamCapability[cameraId]->raw_min_duration[i];
6184                 idx+=4;
6185             }
6186             break;
6187         default:
6188             for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
6189                 available_min_durations[idx] = scalar_formats[j];
6190                 available_min_durations[idx+1] =
6191                     gCamCapability[cameraId]->picture_sizes_tbl[i].width;
6192                 available_min_durations[idx+2] =
6193                     gCamCapability[cameraId]->picture_sizes_tbl[i].height;
6194                 available_min_durations[idx+3] =
6195                     gCamCapability[cameraId]->picture_min_duration[i];
6196                 idx+=4;
6197             }
6198             break;
6199         }
6200     }
6201     staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
6202                       &available_min_durations[0], idx);
6203 
6204     Vector<int32_t> available_hfr_configs;
6205     for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
6206         int32_t fps = 0;
6207         switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
6208         case CAM_HFR_MODE_60FPS:
6209             fps = 60;
6210             break;
6211         case CAM_HFR_MODE_90FPS:
6212             fps = 90;
6213             break;
6214         case CAM_HFR_MODE_120FPS:
6215             fps = 120;
6216             break;
6217         case CAM_HFR_MODE_150FPS:
6218             fps = 150;
6219             break;
6220         case CAM_HFR_MODE_180FPS:
6221             fps = 180;
6222             break;
6223         case CAM_HFR_MODE_210FPS:
6224             fps = 210;
6225             break;
6226         case CAM_HFR_MODE_240FPS:
6227             fps = 240;
6228             break;
6229         case CAM_HFR_MODE_480FPS:
6230             fps = 480;
6231             break;
6232         case CAM_HFR_MODE_OFF:
6233         case CAM_HFR_MODE_MAX:
6234         default:
6235             break;
6236         }
6237 
6238         /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
6239         if (fps >= MIN_FPS_FOR_BATCH_MODE) {
6240             /* For each HFR frame rate, need to advertise one variable fps range
6241              * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
6242              * [120, 120]. While camcorder preview alone is running [30, 120] is
6243              * set by the app. When video recording is started, [120, 120] is
6244              * set. This way sensor configuration does not change when recording
6245              * is started */
6246 
6247             /* (width, height, fps_min, fps_max, batch_size_max) */
6248             available_hfr_configs.add(
6249                     gCamCapability[cameraId]->hfr_tbl[i].dim.width);
6250             available_hfr_configs.add(
6251                     gCamCapability[cameraId]->hfr_tbl[i].dim.height);
6252             available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
6253             available_hfr_configs.add(fps);
6254             available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6255 
6256             /* (width, height, fps_min, fps_max, batch_size_max) */
6257             available_hfr_configs.add(
6258                     gCamCapability[cameraId]->hfr_tbl[i].dim.width);
6259             available_hfr_configs.add(
6260                     gCamCapability[cameraId]->hfr_tbl[i].dim.height);
6261             available_hfr_configs.add(fps);
6262             available_hfr_configs.add(fps);
6263             available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6264        }
6265     }
6266     //Advertise HFR capability only if the property is set
6267     memset(prop, 0, sizeof(prop));
6268     property_get("persist.camera.hal3hfr.enable", prop, "1");
6269     uint8_t hfrEnable = (uint8_t)atoi(prop);
6270 
6271     if(hfrEnable && available_hfr_configs.array()) {
6272         staticInfo.update(
6273                 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
6274                 available_hfr_configs.array(), available_hfr_configs.size());
6275     }
6276 
6277     int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
6278     staticInfo.update(ANDROID_JPEG_MAX_SIZE,
6279                       &max_jpeg_size, 1);
6280 
6281     uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
6282     size_t size = 0;
6283     count = CAM_EFFECT_MODE_MAX;
6284     count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
6285     for (size_t i = 0; i < count; i++) {
6286         int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6287                 gCamCapability[cameraId]->supported_effects[i]);
6288         if (NAME_NOT_FOUND != val) {
6289             avail_effects[size] = (uint8_t)val;
6290             size++;
6291         }
6292     }
6293     staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
6294                       avail_effects,
6295                       size);
6296 
6297     uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
6298     uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
6299     size_t supported_scene_modes_cnt = 0;
6300     count = CAM_SCENE_MODE_MAX;
6301     count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
6302     for (size_t i = 0; i < count; i++) {
6303         if (gCamCapability[cameraId]->supported_scene_modes[i] !=
6304                 CAM_SCENE_MODE_OFF) {
6305             int val = lookupFwkName(SCENE_MODES_MAP,
6306                     METADATA_MAP_SIZE(SCENE_MODES_MAP),
6307                     gCamCapability[cameraId]->supported_scene_modes[i]);
6308             if (NAME_NOT_FOUND != val) {
6309                 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
6310                 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
6311                 supported_scene_modes_cnt++;
6312             }
6313         }
6314     }
6315     staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6316                       avail_scene_modes,
6317                       supported_scene_modes_cnt);
6318 
6319     uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
6320     makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
6321                       supported_scene_modes_cnt,
6322                       CAM_SCENE_MODE_MAX,
6323                       scene_mode_overrides,
6324                       supported_indexes,
6325                       cameraId);
6326 
6327     if (supported_scene_modes_cnt == 0) {
6328         supported_scene_modes_cnt = 1;
6329         avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
6330     }
6331 
6332     staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
6333             scene_mode_overrides, supported_scene_modes_cnt * 3);
6334 
6335     uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
6336                                          ANDROID_CONTROL_MODE_AUTO,
6337                                          ANDROID_CONTROL_MODE_USE_SCENE_MODE};
6338     staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
6339             available_control_modes,
6340             3);
6341 
6342     uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
6343     size = 0;
6344     count = CAM_ANTIBANDING_MODE_MAX;
6345     count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
6346     for (size_t i = 0; i < count; i++) {
6347         int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
6348                 gCamCapability[cameraId]->supported_antibandings[i]);
6349         if (NAME_NOT_FOUND != val) {
6350             avail_antibanding_modes[size] = (uint8_t)val;
6351             size++;
6352         }
6353 
6354     }
6355     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
6356                       avail_antibanding_modes,
6357                       size);
6358 
6359     uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
6360     size = 0;
6361     count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
6362     count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
6363     if (0 == count) {
6364         avail_abberation_modes[0] =
6365                 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
6366         size++;
6367     } else {
6368         for (size_t i = 0; i < count; i++) {
6369             int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6370                     gCamCapability[cameraId]->aberration_modes[i]);
6371             if (NAME_NOT_FOUND != val) {
6372                 avail_abberation_modes[size] = (uint8_t)val;
6373                 size++;
6374             } else {
6375                 ALOGE("%s: Invalid CAC mode %d", __func__,
6376                         gCamCapability[cameraId]->aberration_modes[i]);
6377                 break;
6378             }
6379         }
6380 
6381     }
6382     staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6383             avail_abberation_modes,
6384             size);
6385 
6386     uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
6387     size = 0;
6388     count = CAM_FOCUS_MODE_MAX;
6389     count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
6390     for (size_t i = 0; i < count; i++) {
6391         int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6392                 gCamCapability[cameraId]->supported_focus_modes[i]);
6393         if (NAME_NOT_FOUND != val) {
6394             avail_af_modes[size] = (uint8_t)val;
6395             size++;
6396         }
6397     }
6398     staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
6399                       avail_af_modes,
6400                       size);
6401 
6402     uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
6403     size = 0;
6404     count = CAM_WB_MODE_MAX;
6405     count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
6406     for (size_t i = 0; i < count; i++) {
6407         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6408                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6409                 gCamCapability[cameraId]->supported_white_balances[i]);
6410         if (NAME_NOT_FOUND != val) {
6411             avail_awb_modes[size] = (uint8_t)val;
6412             size++;
6413         }
6414     }
6415     staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
6416                       avail_awb_modes,
6417                       size);
6418 
6419     uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
6420     count = CAM_FLASH_FIRING_LEVEL_MAX;
6421     count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
6422             count);
6423     for (size_t i = 0; i < count; i++) {
6424         available_flash_levels[i] =
6425                 gCamCapability[cameraId]->supported_firing_levels[i];
6426     }
6427     staticInfo.update(ANDROID_FLASH_FIRING_POWER,
6428             available_flash_levels, count);
6429 
6430     uint8_t flashAvailable;
6431     if (gCamCapability[cameraId]->flash_available)
6432         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
6433     else
6434         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
6435     staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
6436             &flashAvailable, 1);
6437 
6438     Vector<uint8_t> avail_ae_modes;
6439     count = CAM_AE_MODE_MAX;
6440     count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
6441     for (size_t i = 0; i < count; i++) {
6442         avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
6443     }
6444     if (flashAvailable) {
6445         avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
6446         avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
6447     }
6448     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
6449                       avail_ae_modes.array(),
6450                       avail_ae_modes.size());
6451 
6452     int32_t sensitivity_range[2];
6453     sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
6454     sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
6455     staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
6456                       sensitivity_range,
6457                       sizeof(sensitivity_range) / sizeof(int32_t));
6458 
6459     staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
6460                       &gCamCapability[cameraId]->max_analog_sensitivity,
6461                       1);
6462 
6463     int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
6464     staticInfo.update(ANDROID_SENSOR_ORIENTATION,
6465                       &sensor_orientation,
6466                       1);
6467 
6468     int32_t max_output_streams[] = {
6469             MAX_STALLING_STREAMS,
6470             MAX_PROCESSED_STREAMS,
6471             MAX_RAW_STREAMS};
6472     staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
6473             max_output_streams,
6474             sizeof(max_output_streams)/sizeof(max_output_streams[0]));
6475 
6476     uint8_t avail_leds = 0;
6477     staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
6478                       &avail_leds, 0);
6479 
6480     uint8_t focus_dist_calibrated;
6481     int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
6482             gCamCapability[cameraId]->focus_dist_calibrated);
6483     if (NAME_NOT_FOUND != val) {
6484         focus_dist_calibrated = (uint8_t)val;
6485         staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
6486                      &focus_dist_calibrated, 1);
6487     }
6488 
6489     int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
6490     size = 0;
6491     count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
6492             MAX_TEST_PATTERN_CNT);
6493     for (size_t i = 0; i < count; i++) {
6494         int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
6495                 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
6496         if (NAME_NOT_FOUND != testpatternMode) {
6497             avail_testpattern_modes[size] = testpatternMode;
6498             size++;
6499         }
6500     }
6501     staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
6502                       avail_testpattern_modes,
6503                       size);
6504 
6505     uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
6506     staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
6507                       &max_pipeline_depth,
6508                       1);
6509 
6510     int32_t partial_result_count = PARTIAL_RESULT_COUNT;
6511     staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
6512                       &partial_result_count,
6513                        1);
6514 
6515     int32_t max_stall_duration = MAX_REPROCESS_STALL;
6516     staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
6517 
6518     Vector<uint8_t> available_capabilities;
6519     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
6520     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
6521     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
6522     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
6523     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
6524     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
6525     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
6526     if (hfrEnable && available_hfr_configs.array()) {
6527         available_capabilities.add(
6528                 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
6529     }
6530 
6531     if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
6532         available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
6533     }
6534     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
6535             available_capabilities.array(),
6536             available_capabilities.size());
6537 
6538     //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR and/or
6539     //BURST_CAPTURE.
6540     uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
6541             ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
6542 
6543     staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
6544             &aeLockAvailable, 1);
6545 
6546     //awbLockAvailable to be set to true if capabilities has
6547     //MANUAL_POST_PROCESSING and/or BURST_CAPTURE.
6548     uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
6549             ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
6550 
6551     staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
6552             &awbLockAvailable, 1);
6553 
6554     int32_t max_input_streams = 1;
6555     staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
6556                       &max_input_streams,
6557                       1);
6558 
6559     /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
6560     int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
6561             HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
6562             HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
6563             HAL_PIXEL_FORMAT_YCbCr_420_888};
6564     staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
6565                       io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
6566 
6567     int32_t max_latency = (limitedDevice) ?
6568             CAM_MAX_SYNC_LATENCY : ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
6569     staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
6570                       &max_latency,
6571                       1);
6572 
6573     uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
6574                                            ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
6575     staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
6576             available_hot_pixel_modes,
6577             sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
6578 
6579     uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
6580                                          ANDROID_SHADING_MODE_FAST,
6581                                          ANDROID_SHADING_MODE_HIGH_QUALITY};
6582     staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
6583                       available_shading_modes,
6584                       3);
6585 
6586     uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
6587                                                   ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
6588     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
6589                       available_lens_shading_map_modes,
6590                       2);
6591 
6592     uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
6593                                       ANDROID_EDGE_MODE_FAST,
6594                                       ANDROID_EDGE_MODE_HIGH_QUALITY,
6595                                       ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
6596     staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
6597             available_edge_modes,
6598             sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
6599 
6600     uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
6601                                            ANDROID_NOISE_REDUCTION_MODE_FAST,
6602                                            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
6603                                            ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
6604                                            ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
6605     staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
6606             available_noise_red_modes,
6607             sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
6608 
6609     uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
6610                                          ANDROID_TONEMAP_MODE_FAST,
6611                                          ANDROID_TONEMAP_MODE_HIGH_QUALITY};
6612     staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
6613             available_tonemap_modes,
6614             sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
6615 
6616     uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
6617     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
6618             available_hot_pixel_map_modes,
6619             sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
6620 
6621     val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
6622             gCamCapability[cameraId]->reference_illuminant1);
6623     if (NAME_NOT_FOUND != val) {
6624         uint8_t fwkReferenceIlluminant = (uint8_t)val;
6625         staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
6626     }
6627 
6628     val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
6629             gCamCapability[cameraId]->reference_illuminant2);
6630     if (NAME_NOT_FOUND != val) {
6631         uint8_t fwkReferenceIlluminant = (uint8_t)val;
6632         staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
6633     }
6634 
6635     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
6636             (void *)gCamCapability[cameraId]->forward_matrix1,
6637             FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
6638 
6639     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
6640             (void *)gCamCapability[cameraId]->forward_matrix2,
6641             FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
6642 
6643     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
6644             (void *)gCamCapability[cameraId]->color_transform1,
6645             COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
6646 
6647     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
6648             (void *)gCamCapability[cameraId]->color_transform2,
6649             COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
6650 
6651     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
6652             (void *)gCamCapability[cameraId]->calibration_transform1,
6653             CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
6654 
6655     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
6656             (void *)gCamCapability[cameraId]->calibration_transform2,
6657             CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
6658 
6659     int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
6660        ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
6661        ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
6662        ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
6663        ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
6664        ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6665        ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
6666        ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
6667        ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
6668        ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
6669        ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
6670        ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
6671        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
6672        ANDROID_JPEG_GPS_COORDINATES,
6673        ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
6674        ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
6675        ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
6676        ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
6677        ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
6678        ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
6679        ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
6680        ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
6681        ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
6682        ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
6683        ANDROID_STATISTICS_FACE_DETECT_MODE,
6684        ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
6685        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
6686        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
6687        ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE};
6688 
6689     size_t request_keys_cnt =
6690             sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
6691     Vector<int32_t> available_request_keys;
6692     available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
6693     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
6694         available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
6695     }
6696 
6697     staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
6698             available_request_keys.array(), available_request_keys.size());
6699 
6700     int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
6701        ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
6702        ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
6703        ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
6704        ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
6705        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
6706        ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
6707        ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
6708        ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
6709        ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
6710        ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
6711        ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
6712        ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
6713        ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
6714        ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6715        ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
6716        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
6717        ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
6718        ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
6719        ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6720        ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
6721        ANDROID_STATISTICS_FACE_SCORES,
6722        ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL,
6723        ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
6724        ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST };
6725     size_t result_keys_cnt =
6726             sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
6727 
6728     Vector<int32_t> available_result_keys;
6729     available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
6730     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
6731         available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
6732     }
6733     if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
6734        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
6735        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
6736     }
6737     if (supportedFaceDetectMode == 1) {
6738         available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
6739         available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
6740     } else if ((supportedFaceDetectMode == 2) ||
6741             (supportedFaceDetectMode == 3)) {
6742         available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
6743         available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
6744     }
6745     staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
6746             available_result_keys.array(), available_result_keys.size());
6747 
6748     int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
6749        ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6750        ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
6751        ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
6752        ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6753        ANDROID_SCALER_CROPPING_TYPE,
6754        ANDROID_SYNC_MAX_LATENCY,
6755        ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6756        ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6757        ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6758        ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
6759        ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
6760        ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6761        ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6762        ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6763        ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6764        ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6765        ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6766        ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
6767        ANDROID_LENS_FACING,
6768        ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
6769        ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
6770        ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
6771        ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
6772        ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6773        ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
6774        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
6775        /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
6776        ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
6777        ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
6778        ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
6779        ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
6780        ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6781        ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6782        ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6783        ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6784        ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
6785        ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
6786        ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
6787        ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6788        ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6789        ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6790        ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6791        ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
6792        ANDROID_EDGE_AVAILABLE_EDGE_MODES,
6793        ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
6794        ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
6795        ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
6796        ANDROID_TONEMAP_MAX_CURVE_POINTS,
6797        ANDROID_CONTROL_AVAILABLE_MODES,
6798        ANDROID_CONTROL_AE_LOCK_AVAILABLE,
6799        ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
6800        ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
6801        ANDROID_SHADING_AVAILABLE_MODES,
6802        ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
6803 
6804     Vector<int32_t> available_characteristics_keys;
6805     available_characteristics_keys.appendArray(characteristics_keys_basic,
6806             sizeof(characteristics_keys_basic)/sizeof(int32_t));
6807     if (hasBlackRegions) {
6808         available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
6809     }
6810     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
6811                       available_characteristics_keys.array(),
6812                       available_characteristics_keys.size());
6813 
6814     /*available stall durations depend on the hw + sw and will be different for different devices */
6815     /*have to add for raw after implementation*/
6816     int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
6817     size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
6818 
6819     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6820     size_t raw_count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt,
6821             MAX_SIZES_CNT);
6822     size_t available_stall_size = count * 4;
6823     int64_t available_stall_durations[available_stall_size];
6824     idx = 0;
6825     for (uint32_t j = 0; j < stall_formats_count; j++) {
6826        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
6827           for (uint32_t i = 0; i < count; i++) {
6828              available_stall_durations[idx]   = stall_formats[j];
6829              available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
6830              available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
6831              available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
6832              idx+=4;
6833           }
6834        } else {
6835           for (uint32_t i = 0; i < raw_count; i++) {
6836              available_stall_durations[idx]   = stall_formats[j];
6837              available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
6838              available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
6839              available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
6840              idx+=4;
6841           }
6842        }
6843     }
6844     staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
6845                       available_stall_durations,
6846                       idx);
6847     //QCAMERA3_OPAQUE_RAW
6848     uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6849     cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6850     switch (gCamCapability[cameraId]->opaque_raw_fmt) {
6851     case LEGACY_RAW:
6852         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6853             fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
6854         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6855             fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6856         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6857             fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
6858         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6859         break;
6860     case MIPI_RAW:
6861         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6862             fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
6863         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6864             fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
6865         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6866             fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
6867         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
6868         break;
6869     default:
6870         ALOGE("%s: unknown opaque_raw_format %d", __func__,
6871                 gCamCapability[cameraId]->opaque_raw_fmt);
6872         break;
6873     }
6874     staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
6875 
6876     int32_t strides[3*raw_count];
6877     for (size_t i = 0; i < raw_count; i++) {
6878         cam_stream_buf_plane_info_t buf_planes;
6879         strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
6880         strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
6881         mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
6882             &gCamCapability[cameraId]->padding_info, &buf_planes);
6883         strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
6884     }
6885     staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
6886             3*raw_count);
6887 
6888     gStaticMetadata[cameraId] = staticInfo.release();
6889     return rc;
6890 }
6891 
6892 /*===========================================================================
6893  * FUNCTION   : makeTable
6894  *
6895  * DESCRIPTION: make a table of sizes
6896  *
6897  * PARAMETERS :
6898  *
6899  *
6900  *==========================================================================*/
makeTable(cam_dimension_t * dimTable,size_t size,size_t max_size,int32_t * sizeTable)6901 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
6902         size_t max_size, int32_t *sizeTable)
6903 {
6904     size_t j = 0;
6905     if (size > max_size) {
6906        size = max_size;
6907     }
6908     for (size_t i = 0; i < size; i++) {
6909         sizeTable[j] = dimTable[i].width;
6910         sizeTable[j+1] = dimTable[i].height;
6911         j+=2;
6912     }
6913 }
6914 
6915 /*===========================================================================
6916  * FUNCTION   : makeFPSTable
6917  *
6918  * DESCRIPTION: make a table of fps ranges
6919  *
6920  * PARAMETERS :
6921  *
6922  *==========================================================================*/
makeFPSTable(cam_fps_range_t * fpsTable,size_t size,size_t max_size,int32_t * fpsRangesTable)6923 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
6924         size_t max_size, int32_t *fpsRangesTable)
6925 {
6926     size_t j = 0;
6927     if (size > max_size) {
6928        size = max_size;
6929     }
6930     for (size_t i = 0; i < size; i++) {
6931         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
6932         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
6933         j+=2;
6934     }
6935 }
6936 
6937 /*===========================================================================
6938  * FUNCTION   : makeOverridesList
6939  *
6940  * DESCRIPTION: make a list of scene mode overrides
6941  *
6942  * PARAMETERS :
6943  *
6944  *
6945  *==========================================================================*/
makeOverridesList(cam_scene_mode_overrides_t * overridesTable,size_t size,size_t max_size,uint8_t * overridesList,uint8_t * supported_indexes,uint32_t camera_id)6946 void QCamera3HardwareInterface::makeOverridesList(
6947         cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
6948         uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
6949 {
6950     /*daemon will give a list of overrides for all scene modes.
6951       However we should send the fwk only the overrides for the scene modes
6952       supported by the framework*/
6953     size_t j = 0;
6954     if (size > max_size) {
6955        size = max_size;
6956     }
6957     size_t focus_count = CAM_FOCUS_MODE_MAX;
6958     focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
6959             focus_count);
6960     for (size_t i = 0; i < size; i++) {
6961         bool supt = false;
6962         size_t index = supported_indexes[i];
6963         overridesList[j] = gCamCapability[camera_id]->flash_available ?
6964                 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
6965         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6966                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6967                 overridesTable[index].awb_mode);
6968         if (NAME_NOT_FOUND != val) {
6969             overridesList[j+1] = (uint8_t)val;
6970         }
6971         uint8_t focus_override = overridesTable[index].af_mode;
6972         for (size_t k = 0; k < focus_count; k++) {
6973            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
6974               supt = true;
6975               break;
6976            }
6977         }
6978         if (supt) {
6979             val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6980                     focus_override);
6981             if (NAME_NOT_FOUND != val) {
6982                 overridesList[j+2] = (uint8_t)val;
6983             }
6984         } else {
6985            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
6986         }
6987         j+=3;
6988     }
6989 }
6990 
6991 /*===========================================================================
6992  * FUNCTION   : filterJpegSizes
6993  *
6994  * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
6995  *              could be downscaled to
6996  *
6997  * PARAMETERS :
6998  *
6999  * RETURN     : length of jpegSizes array
7000  *==========================================================================*/
7001 
filterJpegSizes(int32_t * jpegSizes,int32_t * processedSizes,size_t processedSizesCnt,size_t maxCount,cam_rect_t active_array_size,uint8_t downscale_factor)7002 size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
7003         size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
7004         uint8_t downscale_factor)
7005 {
7006     if (0 == downscale_factor) {
7007         downscale_factor = 1;
7008     }
7009 
7010     int32_t min_width = active_array_size.width / downscale_factor;
7011     int32_t min_height = active_array_size.height / downscale_factor;
7012     size_t jpegSizesCnt = 0;
7013     if (processedSizesCnt > maxCount) {
7014         processedSizesCnt = maxCount;
7015     }
7016     for (size_t i = 0; i < processedSizesCnt; i+=2) {
7017         if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
7018             jpegSizes[jpegSizesCnt] = processedSizes[i];
7019             jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
7020             jpegSizesCnt += 2;
7021         }
7022     }
7023     return jpegSizesCnt;
7024 }
7025 
/*===========================================================================
 * FUNCTION   : getScalarFormat
 *
 * DESCRIPTION: convert the backend format to a pixel format recognized by
 *              the framework
 *
 * PARAMETERS : format : the format from backend
 *
 * RETURN     : format recognized by framework
 *
 *==========================================================================*/
getScalarFormat(int32_t format)7036 int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
7037 {
7038     int32_t halPixelFormat;
7039 
7040     switch (format) {
7041     case CAM_FORMAT_YUV_420_NV12:
7042         halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
7043         break;
7044     case CAM_FORMAT_YUV_420_NV21:
7045         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
7046         break;
7047     case CAM_FORMAT_YUV_420_NV21_ADRENO:
7048         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
7049         break;
7050     case CAM_FORMAT_YUV_420_YV12:
7051         halPixelFormat = HAL_PIXEL_FORMAT_YV12;
7052         break;
7053     case CAM_FORMAT_YUV_422_NV16:
7054     case CAM_FORMAT_YUV_422_NV61:
7055     default:
7056         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
7057         break;
7058     }
7059     return halPixelFormat;
7060 }
7061 
7062 /*===========================================================================
7063  * FUNCTION   : computeNoiseModelEntryS
7064  *
7065  * DESCRIPTION: function to map a given sensitivity to the S noise
7066  *              model parameters in the DNG noise model.
7067  *
7068  * PARAMETERS : sens : the sensor sensitivity
7069  *
7070  ** RETURN    : S (sensor amplification) noise
7071  *
7072  *==========================================================================*/
computeNoiseModelEntryS(int32_t sens)7073 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
7074     double s = gCamCapability[mCameraId]->gradient_S * sens +
7075             gCamCapability[mCameraId]->offset_S;
7076     return ((s < 0.0) ? 0.0 : s);
7077 }
7078 
7079 /*===========================================================================
7080  * FUNCTION   : computeNoiseModelEntryO
7081  *
7082  * DESCRIPTION: function to map a given sensitivity to the O noise
7083  *              model parameters in the DNG noise model.
7084  *
7085  * PARAMETERS : sens : the sensor sensitivity
7086  *
7087  ** RETURN    : O (sensor readout) noise
7088  *
7089  *==========================================================================*/
computeNoiseModelEntryO(int32_t sens)7090 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
7091     int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
7092     double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
7093             1.0 : (1.0 * sens / max_analog_sens);
7094     double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
7095             gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
7096     return ((o < 0.0) ? 0.0 : o);
7097 }
7098 
7099 /*===========================================================================
7100  * FUNCTION   : getSensorSensitivity
7101  *
7102  * DESCRIPTION: convert iso_mode to an integer value
7103  *
7104  * PARAMETERS : iso_mode : the iso_mode supported by sensor
7105  *
7106  ** RETURN    : sensitivity supported by sensor
7107  *
7108  *==========================================================================*/
getSensorSensitivity(int32_t iso_mode)7109 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
7110 {
7111     int32_t sensitivity;
7112 
7113     switch (iso_mode) {
7114     case CAM_ISO_MODE_100:
7115         sensitivity = 100;
7116         break;
7117     case CAM_ISO_MODE_200:
7118         sensitivity = 200;
7119         break;
7120     case CAM_ISO_MODE_400:
7121         sensitivity = 400;
7122         break;
7123     case CAM_ISO_MODE_800:
7124         sensitivity = 800;
7125         break;
7126     case CAM_ISO_MODE_1600:
7127         sensitivity = 1600;
7128         break;
7129     default:
7130         sensitivity = -1;
7131         break;
7132     }
7133     return sensitivity;
7134 }
7135 
7136 /*===========================================================================
7137  * FUNCTION   : getCamInfo
7138  *
7139  * DESCRIPTION: query camera capabilities
7140  *
7141  * PARAMETERS :
7142  *   @cameraId  : camera Id
7143  *   @info      : camera info struct to be filled in with camera capabilities
7144  *
7145  * RETURN     : int type of status
7146  *              NO_ERROR  -- success
7147  *              none-zero failure code
7148  *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    // gCamCapability and gStaticMetadata are lazily-initialized, process-wide
    // caches shared by all camera instances; gCamLock serializes their setup.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            // Release the lock before every early return.
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Translate the backend sensor position into the framework facing enum.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // rc is flagged as failure, yet the remaining fields are still
        // populated below and the error is returned at the end.
        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // First find the highest max_fps advertised in the fps range table.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    // Throughput demand: pixels/sec across the maximum processed stream
    // configuration, normalized by the CPP's maximum pixel bandwidth.
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    ALOGI("%s: camera %d resource cost is %d", __func__, cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
7216 
7217 /*===========================================================================
7218  * FUNCTION   : translateCapabilityToMetadata
7219  *
7220  * DESCRIPTION: translate the capability into camera_metadata_t
7221  *
7222  * PARAMETERS : type of the request
7223  *
7224  *
7225  * RETURN     : success: camera_metadata_t*
7226  *              failure: NULL
7227  *
7228  *==========================================================================*/
translateCapabilityToMetadata(int type)7229 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
7230 {
7231     if (mDefaultMetadata[type] != NULL) {
7232         return mDefaultMetadata[type];
7233     }
7234     //first time we are handling this request
7235     //fill up the metadata structure using the wrapper class
7236     CameraMetadata settings;
7237     //translate from cam_capability_t to camera_metadata_tag_t
7238     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
7239     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
7240     int32_t defaultRequestID = 0;
7241     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
7242 
7243     /* OIS disable */
7244     char ois_prop[PROPERTY_VALUE_MAX];
7245     memset(ois_prop, 0, sizeof(ois_prop));
7246     property_get("persist.camera.ois.disable", ois_prop, "0");
7247     uint8_t ois_disable = (uint8_t)atoi(ois_prop);
7248 
7249     /* Force video to use OIS */
7250     char videoOisProp[PROPERTY_VALUE_MAX];
7251     memset(videoOisProp, 0, sizeof(videoOisProp));
7252     property_get("persist.camera.ois.video", videoOisProp, "1");
7253     uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
7254 
7255     // EIS enable/disable
7256     char eis_prop[PROPERTY_VALUE_MAX];
7257     memset(eis_prop, 0, sizeof(eis_prop));
7258     property_get("persist.camera.eis.enable", eis_prop, "0");
7259     const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
7260 
7261     // Hybrid AE enable/disable
7262     char hybrid_ae_prop[PROPERTY_VALUE_MAX];
7263     memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
7264     property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
7265     const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
7266 
7267     const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
7268     // This is a bit hacky. EIS is enabled only when the above setprop
7269     // is set to non-zero value and on back camera (for 2015 Nexus).
7270     // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
7271     // configureStream is called before this function. In other words,
7272     // we cannot guarantee the app will call configureStream before
7273     // calling createDefaultRequest.
7274     const bool eisEnabled = facingBack && eis_prop_set;
7275 
7276     uint8_t controlIntent = 0;
7277     uint8_t focusMode;
7278     uint8_t vsMode;
7279     uint8_t optStabMode;
7280     uint8_t cacMode;
7281     uint8_t edge_mode;
7282     uint8_t noise_red_mode;
7283     uint8_t tonemap_mode;
7284     vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7285     optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7286     switch (type) {
7287       case CAMERA3_TEMPLATE_PREVIEW:
7288         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
7289         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7290         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7291         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7292         edge_mode = ANDROID_EDGE_MODE_FAST;
7293         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7294         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7295         break;
7296       case CAMERA3_TEMPLATE_STILL_CAPTURE:
7297         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
7298         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7299         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7300         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
7301         edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
7302         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
7303         tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
7304         break;
7305       case CAMERA3_TEMPLATE_VIDEO_RECORD:
7306         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
7307         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7308         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7309         if (eisEnabled) {
7310             vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7311         }
7312         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7313         edge_mode = ANDROID_EDGE_MODE_FAST;
7314         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7315         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7316         if (forceVideoOis)
7317             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7318         break;
7319       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7320         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
7321         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7322         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7323         if (eisEnabled) {
7324             vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7325         }
7326         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7327         edge_mode = ANDROID_EDGE_MODE_FAST;
7328         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7329         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7330         if (forceVideoOis)
7331             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7332         break;
7333       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
7334         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
7335         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7336         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7337         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7338         edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
7339         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
7340         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7341         break;
7342       case CAMERA3_TEMPLATE_MANUAL:
7343         edge_mode = ANDROID_EDGE_MODE_FAST;
7344         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7345         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7346         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7347         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
7348         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7349         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7350         break;
7351       default:
7352         edge_mode = ANDROID_EDGE_MODE_FAST;
7353         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7354         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7355         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7356         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
7357         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7358         break;
7359     }
7360     settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
7361     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
7362     settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
7363     if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
7364         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7365     }
7366     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
7367 
7368     if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7369             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
7370         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7371     else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7372             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
7373             || ois_disable)
7374         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7375     settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
7376 
7377     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7378             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
7379 
7380     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
7381     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
7382 
7383     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
7384     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
7385 
7386     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
7387     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
7388 
7389     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
7390     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
7391 
7392     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
7393     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
7394 
7395     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
7396     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
7397 
7398     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
7399     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
7400 
7401     /*flash*/
7402     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
7403     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
7404 
7405     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
7406     settings.update(ANDROID_FLASH_FIRING_POWER,
7407             &flashFiringLevel, 1);
7408 
7409     /* lens */
7410     float default_aperture = gCamCapability[mCameraId]->apertures[0];
7411     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
7412 
7413     if (gCamCapability[mCameraId]->filter_densities_count) {
7414         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
7415         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
7416                         gCamCapability[mCameraId]->filter_densities_count);
7417     }
7418 
7419     float default_focal_length = gCamCapability[mCameraId]->focal_length;
7420     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
7421 
7422     float default_focus_distance = 0;
7423     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
7424 
7425     static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
7426     settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
7427 
7428     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7429     settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7430 
7431     static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
7432     settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
7433 
7434     /* face detection (default to OFF) */
7435     static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
7436     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
7437 
7438     static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
7439     settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
7440 
7441     static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
7442     settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
7443 
7444     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7445     settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7446 
7447     static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7448     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
7449 
7450     static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7451     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
7452 
7453     /* Exposure time(Update the Min Exposure Time)*/
7454     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
7455     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
7456 
7457     /* frame duration */
7458     static const int64_t default_frame_duration = NSEC_PER_33MSEC;
7459     settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
7460 
7461     /* sensitivity */
7462     static const int32_t default_sensitivity = 100;
7463     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
7464 
7465     /*edge mode*/
7466     settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
7467 
7468     /*noise reduction mode*/
7469     settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
7470 
7471     /*color correction mode*/
7472     static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
7473     settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
7474 
7475     /*transform matrix mode*/
7476     settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
7477 
7478     int32_t scaler_crop_region[4];
7479     scaler_crop_region[0] = 0;
7480     scaler_crop_region[1] = 0;
7481     scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
7482     scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
7483     settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
7484 
7485     static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
7486     settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
7487 
7488     /*focus distance*/
7489     float focus_distance = 0.0;
7490     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
7491 
7492     /*target fps range: use maximum range for picture, and maximum fixed range for video*/
7493     float max_range = 0.0;
7494     float max_fixed_fps = 0.0;
7495     int32_t fps_range[2] = {0, 0};
7496     for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
7497             i++) {
7498         float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
7499             gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7500         if (type == CAMERA3_TEMPLATE_PREVIEW ||
7501                 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
7502                 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
7503             if (range > max_range) {
7504                 fps_range[0] =
7505                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7506                 fps_range[1] =
7507                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7508                 max_range = range;
7509             }
7510         } else {
7511             if (range < 0.01 && max_fixed_fps <
7512                     gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
7513                 fps_range[0] =
7514                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7515                 fps_range[1] =
7516                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7517                 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7518             }
7519         }
7520     }
7521     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
7522 
7523     /*precapture trigger*/
7524     uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
7525     settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
7526 
7527     /*af trigger*/
7528     uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
7529     settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
7530 
7531     /* ae & af regions */
7532     int32_t active_region[] = {
7533             gCamCapability[mCameraId]->active_array_size.left,
7534             gCamCapability[mCameraId]->active_array_size.top,
7535             gCamCapability[mCameraId]->active_array_size.left +
7536                     gCamCapability[mCameraId]->active_array_size.width,
7537             gCamCapability[mCameraId]->active_array_size.top +
7538                     gCamCapability[mCameraId]->active_array_size.height,
7539             0};
7540     settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
7541             sizeof(active_region) / sizeof(active_region[0]));
7542     settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
7543             sizeof(active_region) / sizeof(active_region[0]));
7544 
7545     /* black level lock */
7546     uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7547     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
7548 
7549     /* lens shading map mode */
7550     uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7551     if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
7552         shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
7553     }
7554     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
7555 
7556     //special defaults for manual template
7557     if (type == CAMERA3_TEMPLATE_MANUAL) {
7558         static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
7559         settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
7560 
7561         static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
7562         settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
7563 
7564         static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
7565         settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
7566 
7567         static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
7568         settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
7569 
7570         static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
7571         settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
7572 
7573         static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
7574         settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
7575     }
7576 
7577 
7578     /* TNR
7579      * We'll use this location to determine which modes TNR will be set.
7580      * We will enable TNR to be on if either of the Preview/Video stream requires TNR
7581      * This is not to be confused with linking on a per stream basis that decision
7582      * is still on per-session basis and will be handled as part of config stream
7583      */
7584     uint8_t tnr_enable = 0;
7585 
7586     if (m_bTnrPreview || m_bTnrVideo) {
7587 
7588         switch (type) {
7589             case CAMERA3_TEMPLATE_VIDEO_RECORD:
7590             case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7591                     tnr_enable = 1;
7592                     break;
7593 
7594             default:
7595                     tnr_enable = 0;
7596                     break;
7597         }
7598 
7599         int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
7600         settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7601         settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7602 
7603         CDBG("%s: TNR:%d with process plate %d for template:%d",
7604                             __func__, tnr_enable, tnr_process_type, type);
7605     }
7606 
7607     /* CDS default */
7608     char prop[PROPERTY_VALUE_MAX];
7609     memset(prop, 0, sizeof(prop));
7610     property_get("persist.camera.CDS", prop, "Auto");
7611     cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
7612     cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
7613     if (CAM_CDS_MODE_MAX == cds_mode) {
7614         cds_mode = CAM_CDS_MODE_AUTO;
7615     }
7616     m_CdsPreference = cds_mode;
7617 
7618     /* Disabling CDS in templates which have TNR enabled*/
7619     if (tnr_enable)
7620         cds_mode = CAM_CDS_MODE_OFF;
7621 
7622     int32_t mode = cds_mode;
7623     settings.update(QCAMERA3_CDS_MODE, &mode, 1);
7624 
7625     /* hybrid ae */
7626     settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
7627 
7628     mDefaultMetadata[type] = settings.release();
7629 
7630     return mDefaultMetadata[type];
7631 }
7632 
7633 /*===========================================================================
7634  * FUNCTION   : setFrameParameters
7635  *
7636  * DESCRIPTION: set parameters per frame as requested in the metadata from
7637  *              framework
7638  *
 * PARAMETERS :
 *   @request   : request that needs to be serviced
 *   @streamID : Stream ID of all the requested streams
 *   @blob_request: Whether this request is a blob request or not
 *   @snapshotStreamId: Stream ID of the snapshot stream
 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
7646  *==========================================================================*/
setFrameParameters(camera3_capture_request_t * request,cam_stream_ID_t streamID,int blob_request,uint32_t snapshotStreamId)7647 int QCamera3HardwareInterface::setFrameParameters(
7648                     camera3_capture_request_t *request,
7649                     cam_stream_ID_t streamID,
7650                     int blob_request,
7651                     uint32_t snapshotStreamId)
7652 {
7653     /*translate from camera_metadata_t type to parm_type_t*/
7654     int rc = 0;
7655     int32_t hal_version = CAM_HAL_V3;
7656 
7657     clear_metadata_buffer(mParameters);
7658     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
7659         ALOGE("%s: Failed to set hal version in the parameters", __func__);
7660         return BAD_VALUE;
7661     }
7662 
7663     /*we need to update the frame number in the parameters*/
7664     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
7665             request->frame_number)) {
7666         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7667         return BAD_VALUE;
7668     }
7669 
7670     /* Update stream id of all the requested buffers */
7671     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
7672         ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
7673         return BAD_VALUE;
7674     }
7675 
7676     if (mUpdateDebugLevel) {
7677         uint32_t dummyDebugLevel = 0;
7678         /* The value of dummyDebugLevel is irrelavent. On
7679          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
7680         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
7681                 dummyDebugLevel)) {
7682             ALOGE("%s: Failed to set UPDATE_DEBUG_LEVEL", __func__);
7683             return BAD_VALUE;
7684         }
7685         mUpdateDebugLevel = false;
7686     }
7687 
7688     if(request->settings != NULL){
7689         rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
7690         if (blob_request)
7691             memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
7692     }
7693 
7694     return rc;
7695 }
7696 
7697 /*===========================================================================
7698  * FUNCTION   : setReprocParameters
7699  *
7700  * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
7701  *              return it.
7702  *
 * PARAMETERS :
 *   @request   : request that needs to be serviced
 *   @reprocParam : HAL metadata buffer to fill with the translated settings
 *   @snapshotStreamId : Stream ID of the snapshot stream
 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
7708  *==========================================================================*/
setReprocParameters(camera3_capture_request_t * request,metadata_buffer_t * reprocParam,uint32_t snapshotStreamId)7709 int32_t QCamera3HardwareInterface::setReprocParameters(
7710         camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
7711         uint32_t snapshotStreamId)
7712 {
7713     /*translate from camera_metadata_t type to parm_type_t*/
7714     int rc = 0;
7715 
7716     if (NULL == request->settings){
7717         ALOGE("%s: Reprocess settings cannot be NULL", __func__);
7718         return BAD_VALUE;
7719     }
7720 
7721     if (NULL == reprocParam) {
7722         ALOGE("%s: Invalid reprocessing metadata buffer", __func__);
7723         return BAD_VALUE;
7724     }
7725     clear_metadata_buffer(reprocParam);
7726 
7727     /*we need to update the frame number in the parameters*/
7728     if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
7729             request->frame_number)) {
7730         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7731         return BAD_VALUE;
7732     }
7733 
7734     rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
7735     if (rc < 0) {
7736         ALOGE("%s: Failed to translate reproc request", __func__);
7737         return rc;
7738     }
7739 
7740     CameraMetadata frame_settings;
7741     frame_settings = request->settings;
7742     if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
7743             frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
7744         int32_t *crop_count =
7745                 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
7746         int32_t *crop_data =
7747                 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
7748         int32_t *roi_map =
7749                 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
7750         if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
7751             cam_crop_data_t crop_meta;
7752             memset(&crop_meta, 0, sizeof(cam_crop_data_t));
7753             crop_meta.num_of_streams = 1;
7754             crop_meta.crop_info[0].crop.left   = crop_data[0];
7755             crop_meta.crop_info[0].crop.top    = crop_data[1];
7756             crop_meta.crop_info[0].crop.width  = crop_data[2];
7757             crop_meta.crop_info[0].crop.height = crop_data[3];
7758 
7759             crop_meta.crop_info[0].roi_map.left =
7760                     roi_map[0];
7761             crop_meta.crop_info[0].roi_map.top =
7762                     roi_map[1];
7763             crop_meta.crop_info[0].roi_map.width =
7764                     roi_map[2];
7765             crop_meta.crop_info[0].roi_map.height =
7766                     roi_map[3];
7767 
7768             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
7769                 rc = BAD_VALUE;
7770             }
7771             CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d",
7772                     __func__,
7773                     request->input_buffer->stream,
7774                     crop_meta.crop_info[0].crop.left,
7775                     crop_meta.crop_info[0].crop.top,
7776                     crop_meta.crop_info[0].crop.width,
7777                     crop_meta.crop_info[0].crop.height);
7778             CDBG("%s: Found reprocess roi map data for stream %p %dx%d, %dx%d",
7779                     __func__,
7780                     request->input_buffer->stream,
7781                     crop_meta.crop_info[0].roi_map.left,
7782                     crop_meta.crop_info[0].roi_map.top,
7783                     crop_meta.crop_info[0].roi_map.width,
7784                     crop_meta.crop_info[0].roi_map.height);
7785             } else {
7786                 ALOGE("%s: Invalid reprocess crop count %d!", __func__, *crop_count);
7787             }
7788     } else {
7789         ALOGE("%s: No crop data from matching output stream", __func__);
7790     }
7791 
7792     /* These settings are not needed for regular requests so handle them specially for
7793        reprocess requests; information needed for EXIF tags */
7794     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
7795         int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
7796                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7797         if (NAME_NOT_FOUND != val) {
7798             uint32_t flashMode = (uint32_t)val;
7799             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
7800                 rc = BAD_VALUE;
7801             }
7802         } else {
7803             ALOGE("%s: Could not map fwk flash mode %d to correct hal flash mode", __func__,
7804                     frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7805         }
7806     } else {
7807         CDBG_HIGH("%s: No flash mode in reprocess settings", __func__);
7808     }
7809 
7810     if (frame_settings.exists(ANDROID_FLASH_STATE)) {
7811         int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
7812         if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
7813             rc = BAD_VALUE;
7814         }
7815     } else {
7816         CDBG_HIGH("%s: No flash state in reprocess settings", __func__);
7817     }
7818 
7819     return rc;
7820 }
7821 
7822 /*===========================================================================
7823  * FUNCTION   : saveRequestSettings
7824  *
7825  * DESCRIPTION: Add any settings that might have changed to the request settings
7826  *              and save the settings to be applied on the frame
7827  *
7828  * PARAMETERS :
7829  *   @jpegMetadata : the extracted and/or modified jpeg metadata
7830  *   @request      : request with initial settings
7831  *
7832  * RETURN     :
7833  * camera_metadata_t* : pointer to the saved request settings
7834  *==========================================================================*/
saveRequestSettings(const CameraMetadata & jpegMetadata,camera3_capture_request_t * request)7835 camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
7836         const CameraMetadata &jpegMetadata,
7837         camera3_capture_request_t *request)
7838 {
7839     camera_metadata_t *resultMetadata;
7840     CameraMetadata camMetadata;
7841     camMetadata = request->settings;
7842 
7843     if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7844         int32_t thumbnail_size[2];
7845         thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7846         thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7847         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
7848                 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7849     }
7850 
7851     resultMetadata = camMetadata.release();
7852     return resultMetadata;
7853 }
7854 
7855 /*===========================================================================
7856  * FUNCTION   : setHalFpsRange
7857  *
7858  * DESCRIPTION: set FPS range parameter
7859  *
7860  *
7861  * PARAMETERS :
7862  *   @settings    : Metadata from framework
7863  *   @hal_metadata: Metadata buffer
7864  *
7865  *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
7868  *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    // Translate the framework AE target FPS range into the HAL FPS-range and
    // HFR parameters, and (re)compute the HFR batching state as a side effect
    // (mBatchSize, mHFRVideoFps, mNeedSensorRestart).
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): assumes ANDROID_CONTROL_AE_TARGET_FPS_RANGE is present in
    // 'settings' -- the caller is expected to have checked exists() first;
    // find() on an absent tag would yield a NULL data pointer here.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    CDBG("%s: aeTargetFpsRange fps: [%f %f]", __func__,
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Batching is off by default; it is only enabled below in HFR mode.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // In constrained high-speed mode the sensor runs at a fixed rate:
        // pin both the AE and video minimums to the maximum (per the table
        // above).
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch enough frames per request to keep preview at
                // PREVIEW_FPS_FOR_HFR, capped at the hardware maximum.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            CDBG("%s: hfrMode: %d batchSize: %d", __func__, hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    CDBG("%s: fps: [%f %f] vid_fps: [%f %f]", __func__, fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
7962 
7963 /*===========================================================================
7964  * FUNCTION   : translateToHalMetadata
7965  *
7966  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
7967  *
7968  *
 * PARAMETERS :
 *   @request  : request sent from framework
 *   @hal_metadata : HAL metadata buffer to populate from the request settings
 *   @snapshotStreamId : Stream ID of the snapshot stream
 *
7973  * RETURN     : success: NO_ERROR
7974  *              failure:
7975  *==========================================================================*/
translateToHalMetadata(const camera3_capture_request_t * request,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId)7976 int QCamera3HardwareInterface::translateToHalMetadata
7977                                   (const camera3_capture_request_t *request,
7978                                    metadata_buffer_t *hal_metadata,
7979                                    uint32_t snapshotStreamId)
7980 {
7981     int rc = 0;
7982     CameraMetadata frame_settings;
7983     frame_settings = request->settings;
7984 
7985     /* Do not change the order of the following list unless you know what you are
7986      * doing.
7987      * The order is laid out in such a way that parameters in the front of the table
7988      * may be used to override the parameters later in the table. Examples are:
7989      * 1. META_MODE should precede AEC/AWB/AF MODE
7990      * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
7991      * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
7992      * 4. Any mode should precede it's corresponding settings
7993      */
7994     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
7995         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
7996         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
7997             rc = BAD_VALUE;
7998         }
7999         rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
8000         if (rc != NO_ERROR) {
8001             ALOGE("%s: extractSceneMode failed", __func__);
8002         }
8003     }
8004 
8005     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
8006         uint8_t fwk_aeMode =
8007             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
8008         uint8_t aeMode;
8009         int32_t redeye;
8010 
8011         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
8012             aeMode = CAM_AE_MODE_OFF;
8013         } else {
8014             aeMode = CAM_AE_MODE_ON;
8015         }
8016         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
8017             redeye = 1;
8018         } else {
8019             redeye = 0;
8020         }
8021 
8022         int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8023                 fwk_aeMode);
8024         if (NAME_NOT_FOUND != val) {
8025             int32_t flashMode = (int32_t)val;
8026             ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
8027         }
8028 
8029         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
8030         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
8031             rc = BAD_VALUE;
8032         }
8033     }
8034 
    // AWB mode: translate via WHITE_BALANCE_MODES_MAP. An unmapped value is
    // silently dropped (no error, unlike the CAC case below).
    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                fwk_whiteLevel);
        if (NAME_NOT_FOUND != val) {
            uint8_t whiteLevel = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Chromatic aberration correction mode; an unmapped framework value is
    // logged as an error but does not fail the request.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
        uint8_t fwk_cacMode =
                frame_settings.find(
                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
                fwk_cacMode);
        if (NAME_NOT_FOUND != val) {
            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
                rc = BAD_VALUE;
            }
        } else {
            ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
        }
    }
8062 
    // AF mode: translated through FOCUS_MODES_MAP; unmapped values dropped.
    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                fwk_focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t focusMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Manual focus distance (diopters per the framework tag); passed through
    // unvalidated.
    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
                focalDistance)) {
            rc = BAD_VALUE;
        }
    }

    // AE antibanding (flicker avoidance) mode.
    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
        uint8_t fwk_antibandingMode =
                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
        int val = lookupHalName(ANTIBANDING_MODES_MAP,
                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
        if (NAME_NOT_FOUND != val) {
            uint32_t hal_antibandingMode = (uint32_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
                    hal_antibandingMode)) {
                rc = BAD_VALUE;
            }
        }
    }
8096 
    // Exposure compensation: clamped to the sensor capability range rather
    // than rejected when out of bounds.
    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
        int32_t expCompensation = frame_settings.find(
                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
                expCompensation)) {
            rc = BAD_VALUE;
        }
    }

    // AE lock pass-through.
    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
            rc = BAD_VALUE;
        }
    }
    // Target FPS range is translated by a dedicated helper; failure is logged
    // and left in rc.
    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
        rc = setHalFpsRange(frame_settings, hal_metadata);
        if (rc != NO_ERROR) {
            ALOGE("%s: setHalFpsRange failed", __func__);
        }
    }

    // AWB lock pass-through.
    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
            rc = BAD_VALUE;
        }
    }
8129 
    // Special effect mode, translated through EFFECT_MODES_MAP.
    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                fwk_effectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t effectMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Color correction mode pass-through.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
                colorCorrectMode)) {
            rc = BAD_VALUE;
        }
    }

    // Per-channel color correction gains; assumes the framework entry holds
    // at least CC_GAINS_COUNT floats (no count check here).
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
        cam_color_correct_gains_t colorCorrectGains;
        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
            colorCorrectGains.gains[i] =
                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
                colorCorrectGains)) {
            rc = BAD_VALUE;
        }
    }

    // Color correction transform: copy the flat rational array into the HAL's
    // CC_MATRIX_ROWS x CC_MATRIX_COLS matrix, row-major.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
        cam_color_correct_matrix_t colorCorrectTransform;
        cam_rational_type_t transform_elem;
        size_t num = 0;
        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
              transform_elem.numerator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
              transform_elem.denominator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
              num++;
           }
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
                colorCorrectTransform)) {
            rc = BAD_VALUE;
        }
    }
8181 
    // AE precapture trigger: defaults to IDLE / id -1, overridden only when
    // both the trigger and its id are present in the request.
    cam_trigger_t aecTrigger;
    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
    aecTrigger.trigger_id = -1;
    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
        aecTrigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
        aecTrigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
                aecTrigger)) {
            rc = BAD_VALUE;
        }
        CDBG("%s: precaptureTrigger: %d precaptureTriggerID: %d", __func__,
                aecTrigger.trigger, aecTrigger.trigger_id);
    }

    /*af_trigger must come with a trigger id*/
    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
        cam_trigger_t af_trigger;
        af_trigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
        af_trigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
            rc = BAD_VALUE;
        }
        CDBG("%s: AfTrigger: %d AfTriggerID: %d", __func__,
                af_trigger.trigger, af_trigger.trigger_id);
    }

    // Demosaic mode: read from the u8 payload but carried as int32.
    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
            rc = BAD_VALUE;
        }
    }
    // Edge (sharpening) mode; sharpness strength is zeroed when edge
    // enhancement is off, otherwise the capability default is used.
    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
        cam_edge_application_t edge_application;
        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
            edge_application.sharpness = 0;
        } else {
            edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
            rc = BAD_VALUE;
        }
    }
8232 
    // Explicit flash mode: honored only when AE is not in an auto-flash mode
    // (any AE mode above ON controls the flash itself, so android.flash.mode
    // is ignored in that case).
    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
        int32_t respectFlashMode = 1;
        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
            uint8_t fwk_aeMode =
                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
                respectFlashMode = 0;
                CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
                    __func__);
            }
        }
        if (respectFlashMode) {
            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
            CDBG_HIGH("%s: flash mode after mapping %d", __func__, val);
            // To check: CAM_INTF_META_FLASH_MODE usage
            if (NAME_NOT_FOUND != val) {
                uint8_t flashMode = (uint8_t)val;
                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
                    rc = BAD_VALUE;
                }
            }
        }
    }

    // Flash firing power pass-through.
    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
            rc = BAD_VALUE;
        }
    }

    // Flash firing time pass-through.
    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
                flashFiringTime)) {
            rc = BAD_VALUE;
        }
    }

    // Hot pixel correction mode pass-through.
    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
                hotPixelMode)) {
            rc = BAD_VALUE;
        }
    }

    // Lens aperture pass-through.
    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
                lensAperture)) {
            rc = BAD_VALUE;
        }
    }

    // Neutral-density filter density pass-through.
    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
                filterDensity)) {
            rc = BAD_VALUE;
        }
    }

    // Lens focal length pass-through.
    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
                focalLength)) {
            rc = BAD_VALUE;
        }
    }
8304 
    // Optical image stabilization mode pass-through.
    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
        uint8_t optStabMode =
                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
                optStabMode)) {
            rc = BAD_VALUE;
        }
    }

    // Electronic (video) stabilization mode.
    // NOTE(review): this writes into mParameters while every neighboring
    // setting in this function writes into hal_metadata — looks like a
    // copy/paste slip; confirm which batch the EIS mode must land in.
    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
        uint8_t videoStabMode =
                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
                videoStabMode)) {
            rc = BAD_VALUE;
        }
    }


    // Noise reduction mode pass-through.
    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
                noiseRedMode)) {
            rc = BAD_VALUE;
        }
    }

    // Effective exposure factor for reprocess requests.
    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
        float reprocessEffectiveExposureFactor =
            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
                reprocessEffectiveExposureFactor)) {
            rc = BAD_VALUE;
        }
    }
8340 
    // Scaler crop region: remembered (scalerCropSet) so the AE/AF ROI blocks
    // further down can clip their regions against it.
    cam_crop_region_t scalerCropRegion;
    bool scalerCropSet = false;
    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
                scalerCropRegion.width, scalerCropRegion.height);

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
                scalerCropRegion)) {
            rc = BAD_VALUE;
        }
        scalerCropSet = true;
    }

    // Manual exposure time pass-through (no clamping, unlike frame duration
    // and sensitivity below).
    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
        int64_t sensorExpTime =
                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
        CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
                sensorExpTime)) {
            rc = BAD_VALUE;
        }
    }

    // Frame duration: clamped between the per-request minimum and the sensor's
    // maximum frame duration.
    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
        int64_t sensorFrameDuration =
                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
        int64_t minFrameDuration = getMinFrameDuration(request);
        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
        CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
                sensorFrameDuration)) {
            rc = BAD_VALUE;
        }
    }

    // Sensitivity (ISO): clamped to the capability-reported range.
    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
        CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
                sensorSensitivity)) {
            rc = BAD_VALUE;
        }
    }
8396 
    // Lens shading (correction) mode pass-through.
    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
            rc = BAD_VALUE;
        }
    }

    // Face detection mode, translated through FACEDETECT_MODES_MAP; unmapped
    // values are dropped.
    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
        uint8_t fwk_facedetectMode =
                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];

        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
                fwk_facedetectMode);

        if (NAME_NOT_FOUND != val) {
            uint8_t facedetectMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
                    facedetectMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Histogram statistics mode pass-through.
    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
        uint8_t histogramMode =
                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
                histogramMode)) {
            rc = BAD_VALUE;
        }
    }

    // Sharpness map statistics mode pass-through.
    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
        uint8_t sharpnessMapMode =
                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
                sharpnessMapMode)) {
            rc = BAD_VALUE;
        }
    }
8437 
    // Tonemap mode pass-through.
    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
        uint8_t tonemapMode =
                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES, tonemapMode) == 0 ? false : true) {
            rc = BAD_VALUE;
        }
    }
    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
    /*All tonemap channels will have the same number of points*/
    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
        cam_rgb_tonemap_curves tonemapCurves;
        // Each curve point is an (in, out) float pair, hence count/2.
        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
                    __func__, tonemapCurves.tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }

        /* ch0 = G*/
        size_t point = 0;
        cam_tonemap_curve_t tonemapCurveGreen;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveGreen.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[0] = tonemapCurveGreen;

        /* ch 1 = B */
        point = 0;
        cam_tonemap_curve_t tonemapCurveBlue;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveBlue.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[1] = tonemapCurveBlue;

        /* ch 2 = R */
        point = 0;
        cam_tonemap_curve_t tonemapCurveRed;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveRed.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[2] = tonemapCurveRed;

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
                tonemapCurves)) {
            rc = BAD_VALUE;
        }
    }
8500 
    // Capture intent pass-through.
    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
                captureIntent)) {
            rc = BAD_VALUE;
        }
    }

    // Black level lock pass-through.
    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
                blackLevelLock)) {
            rc = BAD_VALUE;
        }
    }

    // Lens shading map (statistics output) mode pass-through.
    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
        uint8_t lensShadingMapMode =
                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
                lensShadingMapMode)) {
            rc = BAD_VALUE;
        }
    }

    // AE regions: converted to a HAL ROI, mapped to sensor coordinates, and
    // clipped against the scaler crop region when one was set above. The ROI
    // is only sent when reset/clip succeeds.
    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
                roi.rect.height);

        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
            rc = BAD_VALUE;
        }
    }

    // AF regions: same treatment as the AE regions above.
    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
                roi.rect.height);

        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
            rc = BAD_VALUE;
        }
    }
8559 
    // CDS mode: forced to the preferred value for 4K video; otherwise taken
    // from the vendor tag (range-checked) unless in constrained high-speed
    // mode.
    if (m_bIs4KVideo) {
        /* Override needed for Video template in case of 4K video */
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                CAM_INTF_PARM_CDS_MODE, m_CdsPreference)) {
            rc = BAD_VALUE;
        }
    } else if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
            frame_settings.exists(QCAMERA3_CDS_MODE)) {
        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
            ALOGE("%s: Invalid CDS mode %d!", __func__, *fwk_cds);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
                rc = BAD_VALUE;
            }
        }
    }

    // TNR
    // NOTE(review): this entry goes into mParameters, not hal_metadata like
    // its neighbors — confirm which batch is intended. Also b_TnrRequested
    // is assigned but never read in this function.
    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
        uint8_t b_TnrRequested = 0;
        cam_denoise_param_t tnr;
        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
        tnr.process_plates =
            (cam_denoise_process_type_t)frame_settings.find(
            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
        b_TnrRequested = tnr.denoise_enable;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
            rc = BAD_VALUE;
        }
    }
8593 
    // Sensor test pattern: mode is mapped through TEST_PATTERN_MAP; for
    // SOLID_COLOR the per-channel values are reordered according to the
    // sensor's Bayer color filter arrangement.
    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
        int32_t fwk_testPatternMode =
                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);

        if (NAME_NOT_FOUND != testPatternMode) {
            cam_test_pattern_data_t testPatternData;
            memset(&testPatternData, 0, sizeof(testPatternData));
            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
                // Framework data layout is [R, Geven, Godd, B].
                int32_t *fwk_testPatternData =
                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
                testPatternData.r = fwk_testPatternData[0];
                testPatternData.b = fwk_testPatternData[3];
                switch (gCamCapability[mCameraId]->color_arrangement) {
                    case CAM_FILTER_ARRANGEMENT_RGGB:
                    case CAM_FILTER_ARRANGEMENT_GRBG:
                        testPatternData.gr = fwk_testPatternData[1];
                        testPatternData.gb = fwk_testPatternData[2];
                        break;
                    case CAM_FILTER_ARRANGEMENT_GBRG:
                    case CAM_FILTER_ARRANGEMENT_BGGR:
                        testPatternData.gr = fwk_testPatternData[2];
                        testPatternData.gb = fwk_testPatternData[1];
                        break;
                    default:
                        ALOGE("%s: color arrangement %d is not supported", __func__,
                                gCamCapability[mCameraId]->color_arrangement);
                        break;
                }
            }
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
                    testPatternData)) {
                rc = BAD_VALUE;
            }
        } else {
            ALOGE("%s: Invalid framework sensor test pattern mode %d", __func__,
                    fwk_testPatternMode);
        }
    }
8636 
    // JPEG GPS coordinates: copied as an array; mismatch between requested
    // and actually-copied counts fails the request.
    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
        size_t count = 0;
        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
                gps_coords.data.d, gps_coords.count, count);
        if (gps_coords.count != count) {
            rc = BAD_VALUE;
        }
    }

    // JPEG GPS processing method: copied into a fixed-size, NUL-padded local
    // buffer (strlcpy truncates if longer) before batching the full buffer.
    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
        size_t count = 0;
        const char *gps_methods_src = (const char *)
                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
        memset(gps_methods, '\0', sizeof(gps_methods));
        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
        if (GPS_PROCESSING_METHOD_SIZE != count) {
            rc = BAD_VALUE;
        }
    }

    // JPEG GPS timestamp pass-through.
    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
                gps_timestamp)) {
            rc = BAD_VALUE;
        }
    }

    // JPEG orientation: also derives the HAL rotation for the snapshot stream.
    // NOTE(review): rotation_info.rotation is left uninitialized when the
    // orientation is not exactly 0/90/180/270 — confirm callers guarantee
    // right-angle values, or a default should be added.
    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
        cam_rotation_info_t rotation_info;
        if (orientation == 0) {
           rotation_info.rotation = ROTATE_0;
        } else if (orientation == 90) {
           rotation_info.rotation = ROTATE_90;
        } else if (orientation == 180) {
           rotation_info.rotation = ROTATE_180;
        } else if (orientation == 270) {
           rotation_info.rotation = ROTATE_270;
        }
        rotation_info.streamId = snapshotStreamId;
        // NOTE(review): return value of the orientation entry is unchecked,
        // unlike the rotation entry just below.
        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
            rc = BAD_VALUE;
        }
    }

    // JPEG quality pass-through (u8 widened to u32).
    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
            rc = BAD_VALUE;
        }
    }

    // JPEG thumbnail quality pass-through.
    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
        uint32_t thumb_quality = (uint32_t)
                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
                thumb_quality)) {
            rc = BAD_VALUE;
        }
    }

    // JPEG thumbnail size pass-through.
    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
        cam_dimension_t dim;
        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
            rc = BAD_VALUE;
        }
    }
8712 
    // Internal metadata
    // Reprocess private data blob, copied through as an i32 array.
    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
        size_t count = 0;
        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
                privatedata.data.i32, privatedata.count, count);
        if (privatedata.count != count) {
            rc = BAD_VALUE;
        }
    }

    // AV timer vendor tag pass-through.
    if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
        uint8_t* use_av_timer =
                frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
            rc = BAD_VALUE;
        }
    }

    // EV step
    // Always sent: the exposure-compensation step size from capabilities.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
            gCamCapability[mCameraId]->exp_compensation_step)) {
        rc = BAD_VALUE;
    }

    // CDS info
    // NOTE(review): the vendor-tag u8 blob is reinterpreted as cam_cds_data_t
    // with no size check — assumes the caller supplied a full struct; confirm.
    if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
        cam_cds_data_t *cdsData = (cam_cds_data_t *)
                frame_settings.find(QCAMERA3_CDS_INFO).data.u8;

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                CAM_INTF_META_CDS_DATA, *cdsData)) {
            rc = BAD_VALUE;
        }
    }

    // Hybrid AE
    // Experimental hybrid-AE enable flag pass-through.
    if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
        uint8_t *hybrid_ae = (uint8_t *)
                frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
            rc = BAD_VALUE;
        }
    }

    // NO_ERROR unless any batch insert above failed (BAD_VALUE).
    return rc;
}
8762 
8763 /*===========================================================================
8764  * FUNCTION   : captureResultCb
8765  *
8766  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
8767  *
8768  * PARAMETERS :
8769  *   @frame  : frame information from mm-camera-interface
8770  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
8771  *   @userdata: userdata
8772  *
8773  * RETURN     : NONE
8774  *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer,void * userdata)8775 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
8776                 camera3_stream_buffer_t *buffer,
8777                 uint32_t frame_number, bool isInputBuffer, void *userdata)
8778 {
8779     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
8780     if (hw == NULL) {
8781         ALOGE("%s: Invalid hw %p", __func__, hw);
8782         return;
8783     }
8784 
8785     hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
8786     return;
8787 }
8788 
8789 
8790 /*===========================================================================
8791  * FUNCTION   : initialize
8792  *
8793  * DESCRIPTION: Pass framework callback pointers to HAL
8794  *
8795  * PARAMETERS :
8796  *
8797  *
8798  * RETURN     : Success : 0
8799  *              Failure: -ENODEV
8800  *==========================================================================*/
8801 
initialize(const struct camera3_device * device,const camera3_callback_ops_t * callback_ops)8802 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
8803                                   const camera3_callback_ops_t *callback_ops)
8804 {
8805     CDBG("%s: E", __func__);
8806     QCamera3HardwareInterface *hw =
8807         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8808     if (!hw) {
8809         ALOGE("%s: NULL camera device", __func__);
8810         return -ENODEV;
8811     }
8812 
8813     int rc = hw->initialize(callback_ops);
8814     CDBG("%s: X", __func__);
8815     return rc;
8816 }
8817 
8818 /*===========================================================================
8819  * FUNCTION   : configure_streams
8820  *
8821  * DESCRIPTION:
8822  *
8823  * PARAMETERS :
8824  *
8825  *
8826  * RETURN     : Success: 0
8827  *              Failure: -EINVAL (if stream configuration is invalid)
8828  *                       -ENODEV (fatal error)
8829  *==========================================================================*/
8830 
configure_streams(const struct camera3_device * device,camera3_stream_configuration_t * stream_list)8831 int QCamera3HardwareInterface::configure_streams(
8832         const struct camera3_device *device,
8833         camera3_stream_configuration_t *stream_list)
8834 {
8835     CDBG("%s: E", __func__);
8836     QCamera3HardwareInterface *hw =
8837         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8838     if (!hw) {
8839         ALOGE("%s: NULL camera device", __func__);
8840         return -ENODEV;
8841     }
8842     int rc = hw->configureStreams(stream_list);
8843     CDBG("%s: X", __func__);
8844     return rc;
8845 }
8846 
8847 /*===========================================================================
8848  * FUNCTION   : construct_default_request_settings
8849  *
8850  * DESCRIPTION: Configure a settings buffer to meet the required use case
8851  *
8852  * PARAMETERS :
8853  *
8854  *
8855  * RETURN     : Success: Return valid metadata
8856  *              Failure: Return NULL
8857  *==========================================================================*/
8858 const camera_metadata_t* QCamera3HardwareInterface::
construct_default_request_settings(const struct camera3_device * device,int type)8859     construct_default_request_settings(const struct camera3_device *device,
8860                                         int type)
8861 {
8862 
8863     CDBG("%s: E", __func__);
8864     camera_metadata_t* fwk_metadata = NULL;
8865     QCamera3HardwareInterface *hw =
8866         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8867     if (!hw) {
8868         ALOGE("%s: NULL camera device", __func__);
8869         return NULL;
8870     }
8871 
8872     fwk_metadata = hw->translateCapabilityToMetadata(type);
8873 
8874     CDBG("%s: X", __func__);
8875     return fwk_metadata;
8876 }
8877 
8878 /*===========================================================================
8879  * FUNCTION   : process_capture_request
8880  *
8881  * DESCRIPTION:
8882  *
8883  * PARAMETERS :
8884  *
8885  *
8886  * RETURN     :
8887  *==========================================================================*/
process_capture_request(const struct camera3_device * device,camera3_capture_request_t * request)8888 int QCamera3HardwareInterface::process_capture_request(
8889                     const struct camera3_device *device,
8890                     camera3_capture_request_t *request)
8891 {
8892     CDBG("%s: E", __func__);
8893     QCamera3HardwareInterface *hw =
8894         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8895     if (!hw) {
8896         ALOGE("%s: NULL camera device", __func__);
8897         return -EINVAL;
8898     }
8899 
8900     int rc = hw->processCaptureRequest(request);
8901     CDBG("%s: X", __func__);
8902     return rc;
8903 }
8904 
8905 /*===========================================================================
8906  * FUNCTION   : dump
8907  *
8908  * DESCRIPTION:
8909  *
8910  * PARAMETERS :
8911  *
8912  *
8913  * RETURN     :
8914  *==========================================================================*/
8915 
dump(const struct camera3_device * device,int fd)8916 void QCamera3HardwareInterface::dump(
8917                 const struct camera3_device *device, int fd)
8918 {
8919     /* Log level property is read when "adb shell dumpsys media.camera" is
8920        called so that the log level can be controlled without restarting
8921        the media server */
8922     getLogLevel();
8923 
8924     CDBG("%s: E", __func__);
8925     QCamera3HardwareInterface *hw =
8926         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8927     if (!hw) {
8928         ALOGE("%s: NULL camera device", __func__);
8929         return;
8930     }
8931 
8932     hw->dump(fd);
8933     CDBG("%s: X", __func__);
8934     return;
8935 }
8936 
8937 /*===========================================================================
8938  * FUNCTION   : flush
8939  *
8940  * DESCRIPTION:
8941  *
8942  * PARAMETERS :
8943  *
8944  *
8945  * RETURN     :
8946  *==========================================================================*/
8947 
flush(const struct camera3_device * device)8948 int QCamera3HardwareInterface::flush(
8949                 const struct camera3_device *device)
8950 {
8951     int rc;
8952     CDBG("%s: E", __func__);
8953     QCamera3HardwareInterface *hw =
8954         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8955     if (!hw) {
8956         ALOGE("%s: NULL camera device", __func__);
8957         return -EINVAL;
8958     }
8959 
8960     rc = hw->flush();
8961     CDBG("%s: X", __func__);
8962     return rc;
8963 }
8964 
8965 /*===========================================================================
8966  * FUNCTION   : close_camera_device
8967  *
8968  * DESCRIPTION:
8969  *
8970  * PARAMETERS :
8971  *
8972  *
8973  * RETURN     :
8974  *==========================================================================*/
close_camera_device(struct hw_device_t * device)8975 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
8976 {
8977     CDBG("%s: E", __func__);
8978     int ret = NO_ERROR;
8979     QCamera3HardwareInterface *hw =
8980         reinterpret_cast<QCamera3HardwareInterface *>(
8981             reinterpret_cast<camera3_device_t *>(device)->priv);
8982     if (!hw) {
8983         ALOGE("NULL camera device");
8984         return BAD_VALUE;
8985     }
8986     delete hw;
8987 
8988     CDBG("%s: X", __func__);
8989     return ret;
8990 }
8991 
8992 /*===========================================================================
8993  * FUNCTION   : getWaveletDenoiseProcessPlate
8994  *
8995  * DESCRIPTION: query wavelet denoise process plate
8996  *
8997  * PARAMETERS : None
8998  *
8999  * RETURN     : WNR prcocess plate value
9000  *==========================================================================*/
getWaveletDenoiseProcessPlate()9001 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
9002 {
9003     char prop[PROPERTY_VALUE_MAX];
9004     memset(prop, 0, sizeof(prop));
9005     property_get("persist.denoise.process.plates", prop, "0");
9006     int processPlate = atoi(prop);
9007     switch(processPlate) {
9008     case 0:
9009         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9010     case 1:
9011         return CAM_WAVELET_DENOISE_CBCR_ONLY;
9012     case 2:
9013         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9014     case 3:
9015         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9016     default:
9017         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9018     }
9019 }
9020 
9021 
9022 /*===========================================================================
9023  * FUNCTION   : getTemporalDenoiseProcessPlate
9024  *
9025  * DESCRIPTION: query temporal denoise process plate
9026  *
9027  * PARAMETERS : None
9028  *
9029  * RETURN     : TNR prcocess plate value
9030  *==========================================================================*/
getTemporalDenoiseProcessPlate()9031 cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
9032 {
9033     char prop[PROPERTY_VALUE_MAX];
9034     memset(prop, 0, sizeof(prop));
9035     property_get("persist.tnr.process.plates", prop, "0");
9036     int processPlate = atoi(prop);
9037     switch(processPlate) {
9038     case 0:
9039         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9040     case 1:
9041         return CAM_WAVELET_DENOISE_CBCR_ONLY;
9042     case 2:
9043         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9044     case 3:
9045         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9046     default:
9047         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9048     }
9049 }
9050 
9051 
9052 /*===========================================================================
9053  * FUNCTION   : extractSceneMode
9054  *
9055  * DESCRIPTION: Extract scene mode from frameworks set metadata
9056  *
9057  * PARAMETERS :
9058  *      @frame_settings: CameraMetadata reference
9059  *      @metaMode: ANDROID_CONTORL_MODE
9060  *      @hal_metadata: hal metadata structure
9061  *
9062  * RETURN     : None
9063  *==========================================================================*/
extractSceneMode(const CameraMetadata & frame_settings,uint8_t metaMode,metadata_buffer_t * hal_metadata)9064 int32_t QCamera3HardwareInterface::extractSceneMode(
9065         const CameraMetadata &frame_settings, uint8_t metaMode,
9066         metadata_buffer_t *hal_metadata)
9067 {
9068     int32_t rc = NO_ERROR;
9069 
9070     if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
9071         camera_metadata_ro_entry entry =
9072                 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
9073         if (0 == entry.count)
9074             return rc;
9075 
9076         uint8_t fwk_sceneMode = entry.data.u8[0];
9077 
9078         int val = lookupHalName(SCENE_MODES_MAP,
9079                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
9080                 fwk_sceneMode);
9081         if (NAME_NOT_FOUND != val) {
9082             uint8_t sceneMode = (uint8_t)val;
9083             CDBG("%s: sceneMode: %d", __func__, sceneMode);
9084             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9085                     CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9086                 rc = BAD_VALUE;
9087             }
9088         }
9089     } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
9090             (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
9091         uint8_t sceneMode = CAM_SCENE_MODE_OFF;
9092         CDBG("%s: sceneMode: %d", __func__, sceneMode);
9093         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9094                 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9095             rc = BAD_VALUE;
9096         }
9097     }
9098     return rc;
9099 }
9100 
9101 /*===========================================================================
9102  * FUNCTION   : needRotationReprocess
9103  *
9104  * DESCRIPTION: if rotation needs to be done by reprocess in pp
9105  *
9106  * PARAMETERS : none
9107  *
9108  * RETURN     : true: needed
9109  *              false: no need
9110  *==========================================================================*/
needRotationReprocess()9111 bool QCamera3HardwareInterface::needRotationReprocess()
9112 {
9113     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
9114         // current rotation is not zero, and pp has the capability to process rotation
9115         CDBG_HIGH("%s: need do reprocess for rotation", __func__);
9116         return true;
9117     }
9118 
9119     return false;
9120 }
9121 
9122 /*===========================================================================
9123  * FUNCTION   : needReprocess
9124  *
9125  * DESCRIPTION: if reprocess in needed
9126  *
9127  * PARAMETERS : none
9128  *
9129  * RETURN     : true: needed
9130  *              false: no need
9131  *==========================================================================*/
needReprocess(uint32_t postprocess_mask)9132 bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
9133 {
9134     if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
9135         // TODO: add for ZSL HDR later
9136         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
9137         if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
9138             CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
9139             return true;
9140         } else {
9141             CDBG_HIGH("%s: already post processed frame", __func__);
9142             return false;
9143         }
9144     }
9145     return needRotationReprocess();
9146 }
9147 
9148 /*===========================================================================
9149  * FUNCTION   : needJpegRotation
9150  *
9151  * DESCRIPTION: if rotation from jpeg is needed
9152  *
9153  * PARAMETERS : none
9154  *
9155  * RETURN     : true: needed
9156  *              false: no need
9157  *==========================================================================*/
needJpegRotation()9158 bool QCamera3HardwareInterface::needJpegRotation()
9159 {
9160    /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
9161     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
9162        CDBG("%s: Need Jpeg to do the rotation", __func__);
9163        return true;
9164     }
9165     return false;
9166 }
9167 
9168 /*===========================================================================
9169  * FUNCTION   : addOfflineReprocChannel
9170  *
9171  * DESCRIPTION: add a reprocess channel that will do reprocess on frames
9172  *              coming from input channel
9173  *
9174  * PARAMETERS :
9175  *   @config  : reprocess configuration
9176  *   @inputChHandle : pointer to the input (source) channel
9177  *
9178  *
9179  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
9180  *==========================================================================*/
addOfflineReprocChannel(const reprocess_config_t & config,QCamera3ProcessingChannel * inputChHandle)9181 QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
9182         const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
9183 {
9184     int32_t rc = NO_ERROR;
9185     QCamera3ReprocessChannel *pChannel = NULL;
9186 
9187     pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
9188             mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
9189             CAM_QCOM_FEATURE_NONE, this, inputChHandle);
9190     if (NULL == pChannel) {
9191         ALOGE("%s: no mem for reprocess channel", __func__);
9192         return NULL;
9193     }
9194 
9195     rc = pChannel->initialize(IS_TYPE_NONE);
9196     if (rc != NO_ERROR) {
9197         ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
9198         delete pChannel;
9199         return NULL;
9200     }
9201 
9202     // pp feature config
9203     cam_pp_feature_config_t pp_config;
9204     memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
9205 
9206     pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
9207 
9208     rc = pChannel->addReprocStreamsFromSource(pp_config,
9209             config,
9210             IS_TYPE_NONE,
9211             mMetadataChannel);
9212 
9213     if (rc != NO_ERROR) {
9214         delete pChannel;
9215         return NULL;
9216     }
9217     return pChannel;
9218 }
9219 
9220 /*===========================================================================
9221  * FUNCTION   : getMobicatMask
9222  *
9223  * DESCRIPTION: returns mobicat mask
9224  *
9225  * PARAMETERS : none
9226  *
9227  * RETURN     : mobicat mask
9228  *
9229  *==========================================================================*/
getMobicatMask()9230 uint8_t QCamera3HardwareInterface::getMobicatMask()
9231 {
9232     return m_MobicatMask;
9233 }
9234 
9235 /*===========================================================================
9236  * FUNCTION   : setMobicat
9237  *
9238  * DESCRIPTION: set Mobicat on/off.
9239  *
9240  * PARAMETERS :
9241  *   @params  : none
9242  *
9243  * RETURN     : int32_t type of status
9244  *              NO_ERROR  -- success
9245  *              none-zero failure code
9246  *==========================================================================*/
setMobicat()9247 int32_t QCamera3HardwareInterface::setMobicat()
9248 {
9249     char value [PROPERTY_VALUE_MAX];
9250     property_get("persist.camera.mobicat", value, "0");
9251     int32_t ret = NO_ERROR;
9252     uint8_t enableMobi = (uint8_t)atoi(value);
9253 
9254     if (enableMobi) {
9255         tune_cmd_t tune_cmd;
9256         tune_cmd.type = SET_RELOAD_CHROMATIX;
9257         tune_cmd.module = MODULE_ALL;
9258         tune_cmd.value = TRUE;
9259         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9260                 CAM_INTF_PARM_SET_VFE_COMMAND,
9261                 tune_cmd);
9262 
9263         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9264                 CAM_INTF_PARM_SET_PP_COMMAND,
9265                 tune_cmd);
9266     }
9267     m_MobicatMask = enableMobi;
9268 
9269     return ret;
9270 }
9271 
9272 /*===========================================================================
9273 * FUNCTION   : getLogLevel
9274 *
9275 * DESCRIPTION: Reads the log level property into a variable
9276 *
9277 * PARAMETERS :
9278 *   None
9279 *
9280 * RETURN     :
9281 *   None
9282 *==========================================================================*/
getLogLevel()9283 void QCamera3HardwareInterface::getLogLevel()
9284 {
9285     char prop[PROPERTY_VALUE_MAX];
9286     uint32_t globalLogLevel = 0;
9287 
9288     property_get("persist.camera.hal.debug", prop, "0");
9289     int val = atoi(prop);
9290     if (0 <= val) {
9291         gCamHal3LogLevel = (uint32_t)val;
9292     }
9293     property_get("persist.camera.global.debug", prop, "0");
9294     val = atoi(prop);
9295     if (0 <= val) {
9296         globalLogLevel = (uint32_t)val;
9297     }
9298 
9299     /* Highest log level among hal.logs and global.logs is selected */
9300     if (gCamHal3LogLevel < globalLogLevel)
9301         gCamHal3LogLevel = globalLogLevel;
9302 
9303     return;
9304 }
9305 
9306 /*===========================================================================
9307  * FUNCTION   : validateStreamRotations
9308  *
9309  * DESCRIPTION: Check if the rotations requested are supported
9310  *
9311  * PARAMETERS :
9312  *   @stream_list : streams to be configured
9313  *
9314  * RETURN     : NO_ERROR on success
9315  *              -EINVAL on failure
9316  *
9317  *==========================================================================*/
validateStreamRotations(camera3_stream_configuration_t * streamList)9318 int QCamera3HardwareInterface::validateStreamRotations(
9319         camera3_stream_configuration_t *streamList)
9320 {
9321     int rc = NO_ERROR;
9322 
9323     /*
9324     * Loop through all streams requested in configuration
9325     * Check if unsupported rotations have been requested on any of them
9326     */
9327     for (size_t j = 0; j < streamList->num_streams; j++){
9328         camera3_stream_t *newStream = streamList->streams[j];
9329 
9330         bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
9331         bool isImplDef = (newStream->format ==
9332                 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
9333         bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
9334                 isImplDef);
9335 
9336         if (isRotated && (!isImplDef || isZsl)) {
9337             ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
9338                     "type:%d and stream format:%d", __func__,
9339                     newStream->rotation, newStream->stream_type,
9340                     newStream->format);
9341             rc = -EINVAL;
9342             break;
9343         }
9344     }
9345     return rc;
9346 }
9347 
9348 /*===========================================================================
9349 * FUNCTION   : getFlashInfo
9350 *
9351 * DESCRIPTION: Retrieve information about whether the device has a flash.
9352 *
9353 * PARAMETERS :
9354 *   @cameraId  : Camera id to query
9355 *   @hasFlash  : Boolean indicating whether there is a flash device
9356 *                associated with given camera
9357 *   @flashNode : If a flash device exists, this will be its device node.
9358 *
9359 * RETURN     :
9360 *   None
9361 *==========================================================================*/
getFlashInfo(const int cameraId,bool & hasFlash,char (& flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])9362 void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
9363         bool& hasFlash,
9364         char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
9365 {
9366     cam_capability_t* camCapability = gCamCapability[cameraId];
9367     if (NULL == camCapability) {
9368         hasFlash = false;
9369         flashNode[0] = '\0';
9370     } else {
9371         hasFlash = camCapability->flash_available;
9372         strlcpy(flashNode,
9373                 (char*)camCapability->flash_dev_name,
9374                 QCAMERA_MAX_FILEPATH_LENGTH);
9375     }
9376 }
9377 
9378 /*===========================================================================
9379 * FUNCTION   : getEepromVersionInfo
9380 *
9381 * DESCRIPTION: Retrieve version info of the sensor EEPROM data
9382 *
9383 * PARAMETERS : None
9384 *
9385 * RETURN     : string describing EEPROM version
9386 *              "\0" if no such info available
9387 *==========================================================================*/
getEepromVersionInfo()9388 const char *QCamera3HardwareInterface::getEepromVersionInfo()
9389 {
9390     return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
9391 }
9392 
9393 /*===========================================================================
9394 * FUNCTION   : getLdafCalib
9395 *
9396 * DESCRIPTION: Retrieve Laser AF calibration data
9397 *
9398 * PARAMETERS : None
9399 *
9400 * RETURN     : Two uint32_t describing laser AF calibration data
9401 *              NULL if none is available.
9402 *==========================================================================*/
getLdafCalib()9403 const uint32_t *QCamera3HardwareInterface::getLdafCalib()
9404 {
9405     if (mLdafCalibExist) {
9406         return &mLdafCalib[0];
9407     } else {
9408         return NULL;
9409     }
9410 }
9411 
9412 /*===========================================================================
9413  * FUNCTION   : dynamicUpdateMetaStreamInfo
9414  *
9415  * DESCRIPTION: This function:
9416  *             (1) stops all the channels
9417  *             (2) returns error on pending requests and buffers
9418  *             (3) sends metastream_info in setparams
9419  *             (4) starts all channels
9420  *             This is useful when sensor has to be restarted to apply any
9421  *             settings such as frame rate from a different sensor mode
9422  *
9423  * PARAMETERS : None
9424  *
9425  * RETURN     : NO_ERROR on success
9426  *              Error codes on failure
9427  *
9428  *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    CDBG("%s: E", __func__);

    // Sequence matters here: channels must be fully streamed off before
    // outstanding requests are failed, and the updated meta stream info
    // must reach the backend before streaming restarts.
    rc = stopAllChannels();
    if (rc < 0) {
        ALOGE("%s: stopAllChannels failed", __func__);
        return rc;
    }

    // Fail every pending request/buffer back to the framework; the
    // restart invalidates any in-flight capture state.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
        return rc;
    }

    /* Send meta stream info once again so that ISP can start */
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    CDBG("%s: set_parms META_STREAM_INFO with new settings ", __func__ );
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        // Deliberately non-fatal: channels are still restarted below even
        // if the sensor mode could not be changed.
        ALOGE("%s: set Metastreaminfo failed. Sensor mode does not change",
                __func__);
    }

    rc = startAllChannels();
    if (rc < 0) {
        ALOGE("%s: startAllChannels failed", __func__);
        return rc;
    }

    CDBG("%s:%d X", __func__, __LINE__);
    return rc;
}
9468 
9469 /*===========================================================================
9470  * FUNCTION   : stopAllChannels
9471  *
9472  * DESCRIPTION: This function stops (equivalent to stream-off) all channels
9473  *
9474  * PARAMETERS : None
9475  *
9476  * RETURN     : NO_ERROR on success
9477  *              Error codes on failure
9478  *
9479  *==========================================================================*/
stopAllChannels()9480 int32_t QCamera3HardwareInterface::stopAllChannels()
9481 {
9482     int32_t rc = NO_ERROR;
9483 
9484     // Stop the Streams/Channels
9485     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9486         it != mStreamInfo.end(); it++) {
9487         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9488         if (channel != nullptr) {
9489             channel->stop();
9490         }
9491         (*it)->status = INVALID;
9492     }
9493 
9494     if (mSupportChannel) {
9495         mSupportChannel->stop();
9496     }
9497     if (mAnalysisChannel) {
9498         mAnalysisChannel->stop();
9499     }
9500     if (mRawDumpChannel) {
9501         mRawDumpChannel->stop();
9502     }
9503     if (mMetadataChannel) {
9504         /* If content of mStreamInfo is not 0, there is metadata stream */
9505         mMetadataChannel->stop();
9506     }
9507 
9508     CDBG("%s:%d All channels stopped", __func__, __LINE__);
9509     return rc;
9510 }
9511 
9512 /*===========================================================================
9513  * FUNCTION   : startAllChannels
9514  *
9515  * DESCRIPTION: This function starts (equivalent to stream-on) all channels
9516  *
9517  * PARAMETERS : None
9518  *
9519  * RETURN     : NO_ERROR on success
9520  *              Error codes on failure
9521  *
9522  *==========================================================================*/
startAllChannels()9523 int32_t QCamera3HardwareInterface::startAllChannels()
9524 {
9525     int32_t rc = NO_ERROR;
9526 
9527     CDBG("%s: Start all channels ", __func__);
9528     // Start the Streams/Channels
9529     if (mMetadataChannel) {
9530         /* If content of mStreamInfo is not 0, there is metadata stream */
9531         rc = mMetadataChannel->start();
9532         if (rc < 0) {
9533             ALOGE("%s: META channel start failed", __func__);
9534             return rc;
9535         }
9536     }
9537     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9538         it != mStreamInfo.end(); it++) {
9539         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9540         rc = channel->start();
9541         if (rc < 0) {
9542             ALOGE("%s: channel start failed", __func__);
9543             return rc;
9544         }
9545     }
9546     if (mAnalysisChannel) {
9547         mAnalysisChannel->start();
9548     }
9549     if (mSupportChannel) {
9550         rc = mSupportChannel->start();
9551         if (rc < 0) {
9552             ALOGE("%s: Support channel start failed", __func__);
9553             return rc;
9554         }
9555     }
9556     if (mRawDumpChannel) {
9557         rc = mRawDumpChannel->start();
9558         if (rc < 0) {
9559             ALOGE("%s: RAW dump channel start failed", __func__);
9560             return rc;
9561         }
9562     }
9563 
9564     CDBG("%s:%d All channels started", __func__, __LINE__);
9565     return rc;
9566 }
9567 
9568 /*===========================================================================
9569  * FUNCTION   : notifyErrorForPendingRequests
9570  *
9571  * DESCRIPTION: This function sends error for all the pending requests/buffers
9572  *
9573  * PARAMETERS : None
9574  *
9575  * RETURN     : Error codes
9576  *              NO_ERROR on success
9577  *
9578  *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;
    FlushMap flushMap;   // frame_number -> list of its pending buffers

    memset(&result, 0, sizeof(camera3_capture_result_t));

    if (mPendingRequestsList.size() > 0) {
        // Oldest request still awaiting its result metadata. Buffers for
        // frames OLDER than this have already had metadata delivered, so
        // they only need per-buffer errors (phase 1 below); frames at or
        // after it need a full ERROR_REQUEST (phase 2 below).
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    CDBG_HIGH("%s: Oldest frame num on  mPendingRequestsList = %d",
      __func__, frameNum);

    // Phase 1: go through the pending buffers and group those belonging
    // to frames older than the oldest pending request, keyed by frame
    // number. Grouped entries are removed from mPendingBuffersMap.
    for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end();) {

        if (k->frame_number < frameNum) {
            ssize_t idx = flushMap.indexOfKey(k->frame_number);
            if (idx == NAME_NOT_FOUND) {
                // First buffer seen for this frame: start a new vector.
                Vector<PendingBufferInfo> pending;
                pending.add(*k);
                flushMap.add(k->frame_number, pending);
            } else {
                // Append to the existing per-frame vector.
                Vector<PendingBufferInfo> &pending =
                        flushMap.editValueFor(k->frame_number);
                pending.add(*k);
            }

            mPendingBuffersMap.num_buffers--;
            k = mPendingBuffersMap.mPendingBufferList.erase(k);
        } else {
            k++;
        }
    }

    // For each grouped frame, emit one ERROR_BUFFER notify per buffer and
    // then a single capture result returning all of its buffers in
    // CAMERA3_BUFFER_STATUS_ERROR state.
    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
        uint32_t frame_number = flushMap.keyAt(iFlush);
        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);

        // Send Error notify to frameworks for each buffer for which
        // metadata buffer is already sent
        CDBG_HIGH("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
          __func__, frame_number, pending.size());

        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
        // NOTE(review): this NULL check is only meaningful if the build uses
        // a non-throwing operator new (e.g. -fno-exceptions) — confirm.
        if (NULL == pStream_Buf) {
            ALOGE("%s: No memory for pending buffers array", __func__);
            return NO_MEMORY;
        }
        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());

        for (size_t j = 0; j < pending.size(); j++) {
            const PendingBufferInfo &info = pending.itemAt(j);
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = info.stream;
            notify_msg.message.error.frame_number = frame_number;
            pStream_Buf[j].acquire_fence = -1;
            pStream_Buf[j].release_fence = -1;
            pStream_Buf[j].buffer = info.buffer;
            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
            pStream_Buf[j].stream = info.stream;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            CDBG_HIGH("%s: notify frame_number = %d stream %p", __func__,
                    frame_number, info.stream);
        }

        // Result carries only the failed output buffers; no metadata.
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = (uint32_t)pending.size();
        result.output_buffers = pStream_Buf;
        mCallbackOps->process_capture_result(mCallbackOps, &result);

        delete [] pStream_Buf;
    }

    CDBG_HIGH("%s:Sending ERROR REQUEST for all pending requests", __func__);

    // Phase 2: regroup ALL remaining pending buffers (frames that still
    // have a pending request) by frame number, draining the buffer map.
    flushMap.clear();
    for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end();) {
        ssize_t idx = flushMap.indexOfKey(k->frame_number);
        if (idx == NAME_NOT_FOUND) {
            Vector<PendingBufferInfo> pending;
            pending.add(*k);
            flushMap.add(k->frame_number, pending);
        } else {
            Vector<PendingBufferInfo> &pending =
                    flushMap.editValueFor(k->frame_number);
            pending.add(*k);
        }

        mPendingBuffersMap.num_buffers--;
        k = mPendingBuffersMap.mPendingBufferList.erase(k);
    }

    pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

    // Go through the pending requests info and send error request to framework
    // NOTE(review): this loop assumes flushMap entries align one-to-one, in
    // order, with mPendingRequestsList (i is advanced once per flushMap
    // entry and i->input_buffer is read) — confirm that invariant holds.
    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
        uint32_t frame_number = flushMap.keyAt(iFlush);
        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
        CDBG_HIGH("%s:Sending ERROR REQUEST for frame %d",
              __func__, frame_number);

        // Send shutter notify to frameworks
        camera3_notify_msg_t notify_msg;
        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
        notify_msg.type = CAMERA3_MSG_ERROR;
        // ERROR_REQUEST: the whole request failed; no metadata will follow.
        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
        notify_msg.message.error.error_stream = NULL;
        notify_msg.message.error.frame_number = frame_number;
        mCallbackOps->notify(mCallbackOps, &notify_msg);

        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
        // NOTE(review): as above, only meaningful with non-throwing new.
        if (NULL == pStream_Buf) {
            ALOGE("%s: No memory for pending buffers array", __func__);
            return NO_MEMORY;
        }
        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());

        for (size_t j = 0; j < pending.size(); j++) {
            const PendingBufferInfo &info = pending.itemAt(j);
            pStream_Buf[j].acquire_fence = -1;
            pStream_Buf[j].release_fence = -1;
            pStream_Buf[j].buffer = info.buffer;
            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
            pStream_Buf[j].stream = info.stream;
        }

        result.input_buffer = i->input_buffer;
        result.num_output_buffers = (uint32_t)pending.size();
        result.output_buffers = pStream_Buf;
        result.result = NULL;
        result.frame_number = frame_number;
        mCallbackOps->process_capture_result(mCallbackOps, &result);
        delete [] pStream_Buf;
        i = erasePendingRequest(i);
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    flushMap.clear();
    mPendingBuffersMap.num_buffers = 0;
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    CDBG_HIGH("%s: Cleared all the pending buffers ", __func__);

    return rc;
}
9746 
isOnEncoder(const cam_dimension_t max_viewfinder_size,uint32_t width,uint32_t height)9747 bool QCamera3HardwareInterface::isOnEncoder(
9748         const cam_dimension_t max_viewfinder_size,
9749         uint32_t width, uint32_t height)
9750 {
9751     return (width > (uint32_t)max_viewfinder_size.width ||
9752             height > (uint32_t)max_viewfinder_size.height);
9753 }
9754 
9755 /*===========================================================================
9756  * FUNCTION   : setBundleInfo
9757  *
9758  * DESCRIPTION: Set bundle info for all streams that are bundle.
9759  *
9760  * PARAMETERS : None
9761  *
9762  * RETURN     : NO_ERROR on success
9763  *              Error codes on failure
9764  *==========================================================================*/
setBundleInfo()9765 int32_t QCamera3HardwareInterface::setBundleInfo()
9766 {
9767     int32_t rc = NO_ERROR;
9768 
9769     if (mChannelHandle) {
9770         cam_bundle_config_t bundleInfo;
9771         memset(&bundleInfo, 0, sizeof(bundleInfo));
9772         rc = mCameraHandle->ops->get_bundle_info(
9773                 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
9774         if (rc != NO_ERROR) {
9775             ALOGE("%s: get_bundle_info failed", __func__);
9776             return rc;
9777         }
9778         if (mAnalysisChannel) {
9779             mAnalysisChannel->setBundleInfo(bundleInfo);
9780         }
9781         if (mSupportChannel) {
9782             mSupportChannel->setBundleInfo(bundleInfo);
9783         }
9784         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9785                 it != mStreamInfo.end(); it++) {
9786             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9787             channel->setBundleInfo(bundleInfo);
9788         }
9789         if (mRawDumpChannel) {
9790             mRawDumpChannel->setBundleInfo(bundleInfo);
9791         }
9792     }
9793 
9794     return rc;
9795 }
9796 
9797 }; //end namespace qcamera
9798