1 /* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 *     * Redistributions of source code must retain the above copyright
7 *       notice, this list of conditions and the following disclaimer.
8 *     * Redistributions in binary form must reproduce the above
9 *       copyright notice, this list of conditions and the following
10 *       disclaimer in the documentation and/or other materials provided
11 *       with the distribution.
12 *     * Neither the name of The Linux Foundation nor the names of its
13 *       contributors may be used to endorse or promote products derived
14 *       from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29 
30 #define ATRACE_TAG ATRACE_TAG_CAMERA
31 #define LOG_TAG "QCamera3HWI"
32 //#define LOG_NDEBUG 0
33 
34 #define __STDC_LIMIT_MACROS
35 #include <cutils/properties.h>
36 #include <hardware/camera3.h>
37 #include <camera/CameraMetadata.h>
38 #include <stdio.h>
39 #include <stdlib.h>
40 #include <fcntl.h>
41 #include <stdint.h>
42 #include <utils/Log.h>
43 #include <utils/Errors.h>
44 #include <utils/Trace.h>
45 #include <ui/Fence.h>
46 #include <gralloc_priv.h>
47 #include "QCamera3HWI.h"
48 #include "QCamera3Mem.h"
49 #include "QCamera3Channel.h"
50 #include "QCamera3PostProc.h"
51 #include "QCamera3VendorTags.h"
52 
53 using namespace android;
54 
55 namespace qcamera {
56 
57 #define MAX(a, b) ((a) > (b) ? (a) : (b))
58 
59 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
60 
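/* EMPTY_PIPELINE_DELAY: frames of latency expected before results arrive when
 * requests are issued against an idle pipeline; PARTIAL_RESULT_COUNT: number of
 * partial metadata results the HAL delivers per capture. */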
61 #define EMPTY_PIPELINE_DELAY 2
62 #define PARTIAL_RESULT_COUNT 2
63 #define FRAME_SKIP_DELAY     0
64 
65 #define VIDEO_4K_WIDTH  3840
66 #define VIDEO_4K_HEIGHT 2160
67 
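/* Per-configuration stream-count limits enforced in configureStreams() */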
68 #define MAX_RAW_STREAMS        1
69 #define MAX_STALLING_STREAMS   1
70 #define MAX_PROCESSED_STREAMS  3
71 
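/* Per-camera capability tables and cached static metadata, indexed by camera ID
 * and shared across all HAL instances, plus the global HAL log level. */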
72 cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
73 const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
74 volatile uint32_t gCamHal3LogLevel = 1;
75 
76 const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
77     {"On",  CAM_CDS_MODE_ON},
78     {"Off", CAM_CDS_MODE_OFF},
79     {"Auto",CAM_CDS_MODE_AUTO}
80 };
81 
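/* Translation tables between Android camera_metadata enum values and the
 * corresponding vendor (cam_*) enums used by mm-camera-interface. */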
82 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
83     { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
84     { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
85     { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
86     { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
87     { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
88     { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
89     { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
90     { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
91     { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
92 };
93 
94 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
95     { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
96     { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
97     { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
98     { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
99     { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
100     { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
101     { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
102     { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
103     { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
104 };
105 
106 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
107     { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
108     { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
109     { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
110     { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
111     { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
112     { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
113     { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
114     { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
115     { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
116     { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
117     { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
118     { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
119     { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
120     { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
121     { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
122     { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
123 };
124 
125 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
126     { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
127     { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
128     { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
129     { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
130     { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
131     { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
132     { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
133 };
134 
135 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
136     { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
137             CAM_COLOR_CORRECTION_ABERRATION_OFF },
138     { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
139             CAM_COLOR_CORRECTION_ABERRATION_FAST },
140     { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
141             CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
142 };
143 
144 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
145     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
146     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
147     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
148     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
149 };
150 
151 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
152     { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
153     { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
154     { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
155     { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
156     { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
157 };
158 
159 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
160     { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
161     { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
162     { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
163 };
164 
165 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
166     { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
167     { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
168 };
169 
170 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
171     { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
172       CAM_FOCUS_UNCALIBRATED },
173     { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
174       CAM_FOCUS_APPROXIMATE },
175     { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
176       CAM_FOCUS_CALIBRATED }
177 };
178 
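/* (width, height) pairs advertised for ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES;
 * the leading (0, 0) entry indicates that thumbnail generation can be disabled. */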
179 const int32_t available_thumbnail_sizes[] = {0, 0,
180                                              176, 144,
181                                              320, 240,
182                                              432, 288,
183                                              480, 288,
184                                              512, 288,
185                                              512, 384};
186 
187 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
188     { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
189     { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
190     { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
191     { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
192     { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
193 };
194 
195 /* Since there is no mapping for all the options, some Android enums are not listed.
196  * Also, the order in this list is important because while mapping from HAL to Android it will
197  * traverse from lower to higher index, which means that for HAL values that map to different
198  * Android values, the traversal logic will select the first one found.
199  */
200 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
201     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
202     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
203     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
204     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
205     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
206     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
207     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
208     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
209     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
210     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
211     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
212     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
213     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
214     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
215     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
216     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
217 };
218 
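/* camera3_device_ops vtable handed to the framework through camera3_device_t::ops.
 * register_stream_buffers and get_metadata_vendor_tag_ops are deprecated for
 * CAMERA_DEVICE_API_VERSION_3_2 and are therefore left NULL. */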
219 camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
220     initialize:                         QCamera3HardwareInterface::initialize,
221     configure_streams:                  QCamera3HardwareInterface::configure_streams,
222     register_stream_buffers:            NULL,
223     construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
224     process_capture_request:            QCamera3HardwareInterface::process_capture_request,
225     get_metadata_vendor_tag_ops:        NULL,
226     dump:                               QCamera3HardwareInterface::dump,
227     flush:                              QCamera3HardwareInterface::flush,
228     reserved:                           {0},
229 };
230 
231 /*===========================================================================
232  * FUNCTION   : QCamera3HardwareInterface
233  *
234  * DESCRIPTION: constructor of QCamera3HardwareInterface
235  *
236  * PARAMETERS :
237  *   @cameraId  : camera ID
238  *
239  * RETURN     : none
240  *==========================================================================*/
241 QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId,
242                         const camera_module_callbacks_t *callbacks)
243     : mCameraId(cameraId),
244       mCameraHandle(NULL),
245       mCameraOpened(false),
246       mCameraInitialized(false),
247       mCallbackOps(NULL),
248       mInputStream(NULL),
249       mMetadataChannel(NULL),
250       mPictureChannel(NULL),
251       mRawChannel(NULL),
252       mSupportChannel(NULL),
253       mRawDumpChannel(NULL),
254       mFirstRequest(false),
255       mFlush(false),
256       mParamHeap(NULL),
257       mParameters(NULL),
258       m_bIsVideo(false),
259       m_bIs4KVideo(false),
260       mEisEnable(0),
261       mLoopBackResult(NULL),
262       mMinProcessedFrameDuration(0),
263       mMinJpegFrameDuration(0),
264       mMinRawFrameDuration(0),
265       m_pPowerModule(NULL),
266       mMetaFrameCount(0),
267       mCallbacks(callbacks),
268       mCaptureIntent(0)
269 {
270     getLogLevel();
271     mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
272     mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_2;
273     mCameraDevice.common.close = close_camera_device;
274     mCameraDevice.ops = &mCameraOps;
275     mCameraDevice.priv = this;
276     gCamCapability[cameraId]->version = CAM_HAL_V3;
277     // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
278     // TBD - Check whether this hardcoding is still needed, e.g. by printing whether mctl fills this to 3
279     gCamCapability[cameraId]->min_num_pp_bufs = 3;
280 
281     pthread_cond_init(&mRequestCond, NULL);
282     mPendingRequest = 0;
283     mCurrentRequestId = -1;
284     pthread_mutex_init(&mMutex, NULL);
285 
286     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
287         mDefaultMetadata[i] = NULL;
288 
289 #ifdef HAS_MULTIMEDIA_HINTS
290     if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
291         ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
292     }
293 #endif
294 
295     char prop[PROPERTY_VALUE_MAX];
296     property_get("persist.camera.raw.dump", prop, "0");
297     mEnableRawDump = atoi(prop);
298     if (mEnableRawDump)
299         CDBG("%s: Raw dump from Camera HAL enabled", __func__);
300 }
301 
302 /*===========================================================================
303  * FUNCTION   : ~QCamera3HardwareInterface
304  *
305  * DESCRIPTION: destructor of QCamera3HardwareInterface
306  *
307  * PARAMETERS : none
308  *
309  * RETURN     : none
310  *==========================================================================*/
311 QCamera3HardwareInterface::~QCamera3HardwareInterface()
312 {
313     CDBG("%s: E", __func__);
314     /* We need to stop all streams before deleting any stream */
315 
316 
317     if (mRawDumpChannel) {
318         mRawDumpChannel->stop();
319     }
320 
321     // NOTE: 'camera3_stream_t *' objects are already freed at
322     //        this stage by the framework
323     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
324         it != mStreamInfo.end(); it++) {
325         QCamera3Channel *channel = (*it)->channel;
326         if (channel) {
327             channel->stop();
328         }
329     }
330     if (mSupportChannel)
331         mSupportChannel->stop();
332 
333     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
334         it != mStreamInfo.end(); it++) {
335         QCamera3Channel *channel = (*it)->channel;
336         if (channel)
337             delete channel;
338         free (*it);
339     }
340     if (mSupportChannel) {
341         delete mSupportChannel;
342         mSupportChannel = NULL;
343     }
344 
345     if (mRawDumpChannel) {
346         delete mRawDumpChannel;
347         mRawDumpChannel = NULL;
348     }
349     mPictureChannel = NULL;
350 
351     /* Clean up all channels */
352     if (mCameraInitialized) {
353         if (mMetadataChannel) {
354             mMetadataChannel->stop();
355             delete mMetadataChannel;
356             mMetadataChannel = NULL;
357         }
358         deinitParameters();
359     }
360 
361     if (mCameraOpened)
362         closeCamera();
363 
364     mPendingBuffersMap.mPendingBufferList.clear();
365     mPendingRequestsList.clear();
366     mPendingReprocessResultList.clear();
367 
368     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
369         if (mDefaultMetadata[i])
370             free_camera_metadata(mDefaultMetadata[i]);
371 
372     pthread_cond_destroy(&mRequestCond);
373 
374     pthread_mutex_destroy(&mMutex);
375     CDBG("%s: X", __func__);
376 }
377 
378 /*===========================================================================
379  * FUNCTION   : camEvtHandle
380  *
381  * DESCRIPTION: Function registered to mm-camera-interface to handle events
382  *
383  * PARAMETERS :
384  *   @camera_handle : interface layer camera handle
385  *   @evt           : ptr to event
386  *   @user_data     : user data ptr
387  *
388  * RETURN     : none
389  *==========================================================================*/
390 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
391                                           mm_camera_event_t *evt,
392                                           void *user_data)
393 {
394     QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
395     camera3_notify_msg_t notify_msg;
396     if (obj && evt) {
397         switch(evt->server_event_type) {
398             case CAM_EVENT_TYPE_DAEMON_DIED:
399                 ALOGE("%s: Fatal, camera daemon died", __func__);
400                 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
401                 notify_msg.type = CAMERA3_MSG_ERROR;
402                 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
403                 notify_msg.message.error.error_stream = NULL;
404                 notify_msg.message.error.frame_number = 0;
405                 obj->mCallbackOps->notify(obj->mCallbackOps, &notify_msg);
406                 break;
407 
408             case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
409                 CDBG("%s: HAL got request pull from Daemon", __func__);
410                 pthread_mutex_lock(&obj->mMutex);
411                 obj->mWokenUpByDaemon = true;
412                 obj->unblockRequestIfNecessary();
413                 pthread_mutex_unlock(&obj->mMutex);
414                 break;
415 
416             default:
417                 CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
418                         evt->server_event_type);
419                 break;
420         }
421     } else {
422         ALOGE("%s: NULL user_data/evt", __func__);
423     }
424 }
425 
426 /*===========================================================================
427  * FUNCTION   : openCamera
428  *
429  * DESCRIPTION: open camera
430  *
431  * PARAMETERS :
432  *   @hw_device  : double ptr for camera device struct
433  *
434  * RETURN     : int32_t type of status
435  *              NO_ERROR  -- success
436 *              non-zero failure code
437  *==========================================================================*/
438 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
439 {
440     int rc = 0;
441     if (mCameraOpened) {
442         *hw_device = NULL;
443         return PERMISSION_DENIED;
444     }
445 
446     rc = openCamera();
447     if (rc == 0) {
448         *hw_device = &mCameraDevice.common;
449     } else
450         *hw_device = NULL;
451 
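    /* While the camera is open, assert the video-encode power hint; it is
     * released again in closeCamera(). */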
452 #ifdef HAS_MULTIMEDIA_HINTS
453     if (rc == 0) {
454         if (m_pPowerModule) {
455             if (m_pPowerModule->powerHint) {
456                 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
457                         (void *)"state=1");
458             }
459         }
460     }
461 #endif
462     return rc;
463 }
464 
465 /*===========================================================================
466  * FUNCTION   : openCamera
467  *
468  * DESCRIPTION: open camera
469  *
470  * PARAMETERS : none
471  *
472  * RETURN     : int32_t type of status
473  *              NO_ERROR  -- success
474 *              non-zero failure code
475  *==========================================================================*/
476 int QCamera3HardwareInterface::openCamera()
477 {
478     int rc = 0;
479 
480     ATRACE_CALL();
481     if (mCameraHandle) {
482         ALOGE("Failure: Camera already opened");
483         return ALREADY_EXISTS;
484     }
485     mCameraHandle = camera_open(mCameraId);
486     if (!mCameraHandle) {
487         ALOGE("camera_open failed.");
488         return UNKNOWN_ERROR;
489     }
490 
491     mCameraOpened = true;
492 
493     rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
494             camEvtHandle, (void *)this);
495 
496     if (rc < 0) {
497         ALOGE("%s: Error, failed to register event callback", __func__);
498         /* Not closing camera here since it is already handled in destructor */
499         return FAILED_TRANSACTION;
500     }
501 
502     return NO_ERROR;
503 }
504 
505 /*===========================================================================
506  * FUNCTION   : closeCamera
507  *
508  * DESCRIPTION: close camera
509  *
510  * PARAMETERS : none
511  *
512  * RETURN     : int32_t type of status
513  *              NO_ERROR  -- success
514 *              non-zero failure code
515  *==========================================================================*/
516 int QCamera3HardwareInterface::closeCamera()
517 {
518     ATRACE_CALL();
519     int rc = NO_ERROR;
520 
521     rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
522     mCameraHandle = NULL;
523     mCameraOpened = false;
524 
525 #ifdef HAS_MULTIMEDIA_HINTS
526     if (rc == NO_ERROR) {
527         if (m_pPowerModule) {
528             if (m_pPowerModule->powerHint) {
529                 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
530                         (void *)"state=0");
531             }
532         }
533     }
534 #endif
535 
536     return rc;
537 }
538 
539 /*===========================================================================
540  * FUNCTION   : initialize
541  *
542  * DESCRIPTION: Initialize frameworks callback functions
543  *
544  * PARAMETERS :
545  *   @callback_ops : callback function to frameworks
546  *
547  * RETURN     :
548  *
549  *==========================================================================*/
550 int QCamera3HardwareInterface::initialize(
551         const struct camera3_callback_ops *callback_ops)
552 {
553     ATRACE_CALL();
554     int rc;
555 
556     pthread_mutex_lock(&mMutex);
557 
558     rc = initParameters();
559     if (rc < 0) {
560         ALOGE("%s: initParameters failed %d", __func__, rc);
561         goto err1;
562     }
563     mCallbackOps = callback_ops;
564 
565     pthread_mutex_unlock(&mMutex);
566     mCameraInitialized = true;
567     return 0;
568 
569 err1:
570     pthread_mutex_unlock(&mMutex);
571     return rc;
572 }
573 
574 /*===========================================================================
575  * FUNCTION   : validateStreamDimensions
576  *
577  * DESCRIPTION: Check whether the requested stream configuration matches what is advertised
578  *
579  * PARAMETERS :
580  *   @stream_list : streams to be configured
581  *
582  * RETURN     :
583  *
584  *==========================================================================*/
585 int QCamera3HardwareInterface::validateStreamDimensions(
586         camera3_stream_configuration_t *streamList)
587 {
588     int rc = NO_ERROR;
589     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
590     int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
591     uint8_t jpeg_sizes_cnt = 0;
592 
593     /*
594     * Loop through all streams requested in configuration
595     * Check if unsupported sizes have been requested on any of them
596     */
597     for (size_t j = 0; j < streamList->num_streams; j++){
598         bool sizeFound = false;
599         camera3_stream_t *newStream = streamList->streams[j];
600 
601         /*
602         * Sizes are different for each type of stream format; check against
603         * the appropriate table.
604         */
605         switch (newStream->format) {
606         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
607         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
608         case HAL_PIXEL_FORMAT_RAW10:
609             for (int i = 0;
610                     i < gCamCapability[mCameraId]->supported_raw_dim_cnt; i++){
611                 if (gCamCapability[mCameraId]->raw_dim[i].width
612                         == (int32_t) newStream->width
613                     && gCamCapability[mCameraId]->raw_dim[i].height
614                         == (int32_t) newStream->height) {
615                     sizeFound = true;
616                     break;
617                 }
618             }
619             break;
620         case HAL_PIXEL_FORMAT_BLOB:
621             /* Generate JPEG sizes table */
622             makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
623                     gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
624                     available_processed_sizes);
625             jpeg_sizes_cnt = filterJpegSizes(
626                     available_jpeg_sizes,
627                     available_processed_sizes,
628                     (gCamCapability[mCameraId]->picture_sizes_tbl_cnt) * 2,
629                     MAX_SIZES_CNT * 2,
630                     gCamCapability[mCameraId]->active_array_size,
631                     gCamCapability[mCameraId]->max_downscale_factor);
632 
633             /* Verify set size against generated sizes table */
634             for (int i = 0;i < jpeg_sizes_cnt/2; i++) {
635                 if ((int32_t)(newStream->width) == available_jpeg_sizes[i*2] &&
636                     (int32_t)(newStream->height) == available_jpeg_sizes[i*2+1]) {
637                     sizeFound = true;
638                     break;
639                 }
640             }
641             break;
642 
643 
644         case HAL_PIXEL_FORMAT_YCbCr_420_888:
645         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
646         default:
647             /* ZSL stream will be full active array size; validate that */
648             if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
649                 if ((int32_t)(newStream->width) ==
650                     gCamCapability[mCameraId]->active_array_size.width
651                     && (int32_t)(newStream->height)  ==
652                     gCamCapability[mCameraId]->active_array_size.height) {
653                     sizeFound = true;
654                 }
655                 /* We could potentially break here to enforce that a ZSL stream
656                  * set by the framework always has the full active array size,
657                  * but it is not clear from the spec whether the framework will
658                  * always follow that. We also have logic to override to the full
659                  * array size, so keep this check lenient for now.
660                  */
661             }
662 
663             /* Non-ZSL streams still need to conform to advertised sizes */
664             for (int i = 0;
665                 i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt;i++){
666                 if ((int32_t)(newStream->width) ==
667                         gCamCapability[mCameraId]->picture_sizes_tbl[i].width
668                     && (int32_t)(newStream->height) ==
669                         gCamCapability[mCameraId]->picture_sizes_tbl[i].height){
670                     sizeFound = true;
671                 break;
672                 }
673             }
674             break;
675         } /* End of switch(newStream->format) */
676 
677         /* We error out even if a single stream has an unsupported size */
678         if (!sizeFound) {
679         ALOGE("%s: Error: Unsupported size of %d x %d requested for stream "
680               "type: %d", __func__, newStream->width, newStream->height,
681                   newStream->format);
682             rc = -EINVAL;
683             break;
684         }
685     } /* End of for each stream */
686     return rc;
687 }
688 
689 /*===========================================================================
690  * FUNCTION   : configureStreams
691  *
692  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
693  *              and output streams.
694  *
695  * PARAMETERS :
696  *   @stream_list : streams to be configured
697  *
698  * RETURN     :
699  *
700  *==========================================================================*/
701 int QCamera3HardwareInterface::configureStreams(
702         camera3_stream_configuration_t *streamList)
703 {
704     ATRACE_CALL();
705     int rc = 0;
706 
707     // Sanity check stream_list
708     if (streamList == NULL) {
709         ALOGE("%s: NULL stream configuration", __func__);
710         return BAD_VALUE;
711     }
712     if (streamList->streams == NULL) {
713         ALOGE("%s: NULL stream list", __func__);
714         return BAD_VALUE;
715     }
716 
717     if (streamList->num_streams < 1) {
718         ALOGE("%s: Bad number of streams requested: %d", __func__,
719                 streamList->num_streams);
720         return BAD_VALUE;
721     }
722 
723     /* first invalidate all the streams in the mStreamInfo list;
724      * if they appear again, they will be validated */
725     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
726             it != mStreamInfo.end(); it++) {
727         QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
728         channel->stop();
729         (*it)->status = INVALID;
730     }
731 
732     if (mRawDumpChannel) {
733         mRawDumpChannel->stop();
734         delete mRawDumpChannel;
735         mRawDumpChannel = NULL;
736     }
737 
738     if (mSupportChannel)
739         mSupportChannel->stop();
740     if (mMetadataChannel) {
741         /* If mStreamInfo is not empty, the metadata stream exists */
742         mMetadataChannel->stop();
743     }
744 
745     pthread_mutex_lock(&mMutex);
746 
747     /* Check whether we have video stream */
748     m_bIs4KVideo = false;
749     m_bIsVideo = false;
750     bool isZsl = false;
751     size_t videoWidth = 0;
752     size_t videoHeight = 0;
753     size_t rawStreamCnt = 0;
754     size_t stallStreamCnt = 0;
755     size_t processedStreamCnt = 0;
756     // Number of streams on ISP encoder path
757     size_t numStreamsOnEncoder = 0;
758     cam_dimension_t maxViewfinderSize;
759     bool bJpegExceeds4K = false;
760     bool bUseCommonFeatureMask = false;
761     uint32_t commonFeatureMask = 0;
762     maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
763 
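    /* First pass over the requested streams: detect ZSL, (4K) video and
     * oversized JPEG streams, count stream types, and build the common
     * post-processing feature mask for streams larger than the maximum
     * viewfinder size. */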
764     for (size_t i = 0; i < streamList->num_streams; i++) {
765         camera3_stream_t *newStream = streamList->streams[i];
766         CDBG_HIGH("%s: stream[%d] type = %d, format = %d, width = %d, height = %d",
767                 __func__, i, newStream->stream_type, newStream->format,
768                 newStream->width, newStream->height);
769         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
770                 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
771             isZsl = true;
772         }
773         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
774             if (newStream->width > VIDEO_4K_WIDTH ||
775                     newStream->height > VIDEO_4K_HEIGHT)
776                 bJpegExceeds4K = true;
777         }
778 
779         if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
780                 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
781             m_bIsVideo = true;
782 
783             if ((VIDEO_4K_WIDTH <= newStream->width) &&
784                     (VIDEO_4K_HEIGHT <= newStream->height)) {
785                 videoWidth = newStream->width;
786                 videoHeight = newStream->height;
787                 m_bIs4KVideo = true;
788             }
789         }
790         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
791                 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
792             switch (newStream->format) {
793             case HAL_PIXEL_FORMAT_BLOB:
794                 stallStreamCnt++;
795                 if (newStream->width > (uint32_t)maxViewfinderSize.width ||
796                         newStream->height > (uint32_t)maxViewfinderSize.height) {
797                     commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
798                     numStreamsOnEncoder++;
799                 }
800                 break;
801             case HAL_PIXEL_FORMAT_RAW10:
802             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
803             case HAL_PIXEL_FORMAT_RAW16:
804                 rawStreamCnt++;
805                 break;
806             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
807                 processedStreamCnt++;
808                 if (newStream->width > (uint32_t)maxViewfinderSize.width ||
809                         newStream->height > (uint32_t)maxViewfinderSize.height) {
810                     if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
811                         commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
812                     } else {
813                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET;
814                     }
815                     numStreamsOnEncoder++;
816                 }
817                 break;
818             case HAL_PIXEL_FORMAT_YCbCr_420_888:
819             default:
820                 processedStreamCnt++;
821                 if (newStream->width > (uint32_t)maxViewfinderSize.width ||
822                         newStream->height > (uint32_t)maxViewfinderSize.height) {
823                     commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET;
824                     numStreamsOnEncoder++;
825                 }
826                 break;
827             }
828 
829         }
830     }
831 
832     /* Check if num_streams is sane */
833     if (stallStreamCnt > MAX_STALLING_STREAMS ||
834             rawStreamCnt > MAX_RAW_STREAMS ||
835             processedStreamCnt > MAX_PROCESSED_STREAMS) {
836         ALOGE("%s: Invalid stream config: stall: %d, raw: %d, processed %d",
837                 __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
838         pthread_mutex_unlock(&mMutex);
839         return -EINVAL;
840     }
841     /* Check whether we have zsl stream or 4k video case */
842     if (isZsl && m_bIsVideo) {
843         ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
844         pthread_mutex_unlock(&mMutex);
845         return -EINVAL;
846     }
847     /* Check if stream sizes are sane */
848     if (numStreamsOnEncoder > 2) {
849         ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
850                 __func__);
851         pthread_mutex_unlock(&mMutex);
852         return -EINVAL;
853     } else if (1 < numStreamsOnEncoder){
854         bUseCommonFeatureMask = true;
855         CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed",
856                 __func__);
857     }
858     /* Check if BLOB size is greater than 4k in 4k recording case */
859     if (m_bIs4KVideo && bJpegExceeds4K) {
860         ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
861                 __func__);
862         pthread_mutex_unlock(&mMutex);
863         return -EINVAL;
864     }
865 
866     rc = validateStreamDimensions(streamList);
867     if (rc != NO_ERROR) {
868         ALOGE("%s: Invalid stream configuration requested!", __func__);
869         pthread_mutex_unlock(&mMutex);
870         return rc;
871     }
872 
873     camera3_stream_t *inputStream = NULL;
874     camera3_stream_t *jpegStream = NULL;
875     cam_stream_size_info_t stream_config_info;
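    /* Second pass: refresh mStreamInfo book-keeping for reused and new streams,
     * and locate the input and JPEG (BLOB) streams. */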
876     for (size_t i = 0; i < streamList->num_streams; i++) {
877         camera3_stream_t *newStream = streamList->streams[i];
878         CDBG_HIGH("%s: newStream type = %d, stream format = %d stream size : %d x %d",
879                 __func__, newStream->stream_type, newStream->format,
880                  newStream->width, newStream->height);
881         // if the stream is already in mStreamInfo, validate it
882         bool stream_exists = false;
883         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
884                 it != mStreamInfo.end(); it++) {
885             if ((*it)->stream == newStream) {
886                 QCamera3Channel *channel =
887                     (QCamera3Channel*)(*it)->stream->priv;
888                 stream_exists = true;
889                 delete channel;
890                 (*it)->status = VALID;
891                 (*it)->stream->priv = NULL;
892                 (*it)->channel = NULL;
893             }
894         }
895         if (!stream_exists) {
896             //new stream
897             stream_info_t* stream_info;
898             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
899             stream_info->stream = newStream;
900             stream_info->status = VALID;
901             stream_info->channel = NULL;
902             mStreamInfo.push_back(stream_info);
903         }
904         if (newStream->stream_type == CAMERA3_STREAM_INPUT
905                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
906             if (inputStream != NULL) {
907                 ALOGE("%s: Multiple input streams requested!", __func__);
908                 pthread_mutex_unlock(&mMutex);
909                 return BAD_VALUE;
910             }
911             inputStream = newStream;
912         }
913         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
914             jpegStream = newStream;
915         }
916     }
917     mInputStream = inputStream;
918 
919     cleanAndSortStreamInfo();
920     if (mMetadataChannel) {
921         delete mMetadataChannel;
922         mMetadataChannel = NULL;
923     }
924     if (mSupportChannel) {
925         delete mSupportChannel;
926         mSupportChannel = NULL;
927     }
928 
929     //Create metadata channel and initialize it
930     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
931                     mCameraHandle->ops, captureResultCb,
932                     &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
933     if (mMetadataChannel == NULL) {
934         ALOGE("%s: failed to allocate metadata channel", __func__);
935         rc = -ENOMEM;
936         pthread_mutex_unlock(&mMutex);
937         return rc;
938     }
939     rc = mMetadataChannel->initialize(IS_TYPE_NONE, mCaptureIntent);
940     if (rc < 0) {
941         ALOGE("%s: metadata channel initialization failed", __func__);
942         delete mMetadataChannel;
943         mMetadataChannel = NULL;
944         pthread_mutex_unlock(&mMutex);
945         return rc;
946     }
947 
948     /* Create a dummy stream if there is only a single raw or JPEG stream */
949     if (streamList->num_streams == 1 &&
950             (streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
951             streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW10 ||
952             streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW16 ||
953             streamList->streams[0]->format == HAL_PIXEL_FORMAT_BLOB)) {
954         mSupportChannel = new QCamera3SupportChannel(
955                 mCameraHandle->camera_handle,
956                 mCameraHandle->ops,
957                 &gCamCapability[mCameraId]->padding_info,
958                 CAM_QCOM_FEATURE_NONE,
959                 this);
960         if (!mSupportChannel) {
961             ALOGE("%s: dummy channel cannot be created", __func__);
962             pthread_mutex_unlock(&mMutex);
963             return -ENOMEM;
964         }
965    }
966 
967     bool isRawStreamRequested = false;
968     /* Allocate channel objects for the requested streams */
969     for (size_t i = 0; i < streamList->num_streams; i++) {
970         camera3_stream_t *newStream = streamList->streams[i];
971         uint32_t stream_usage = newStream->usage;
972         stream_config_info.stream_sizes[i].width = newStream->width;
973         stream_config_info.stream_sizes[i].height = newStream->height;
974         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
975             newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
976             //for zsl stream the size is jpeg stream size
977             stream_config_info.stream_sizes[i].width = jpegStream->width;
978             stream_config_info.stream_sizes[i].height = jpegStream->height;
979             stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
980             stream_config_info.postprocess_mask[i] = CAM_QCOM_FEATURE_NONE;
981         } else {
982            //for non zsl streams find out the format
983            switch (newStream->format) {
984            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
985               {
986                  if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
987                     stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
988                  } else {
989                     stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
990                  }
991                  stream_config_info.postprocess_mask[i] = CAM_QCOM_FEATURE_PP_SUPERSET;
992               }
993               break;
994            case HAL_PIXEL_FORMAT_YCbCr_420_888:
995               stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
996               stream_config_info.postprocess_mask[i] = CAM_QCOM_FEATURE_PP_SUPERSET;
997               break;
998            case HAL_PIXEL_FORMAT_BLOB:
999               stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
1000               if (m_bIs4KVideo && !isZsl) {
1001                   stream_config_info.postprocess_mask[i] = CAM_QCOM_FEATURE_PP_SUPERSET;
1002               } else {
1003                   if (bUseCommonFeatureMask &&
1004                           (newStream->width > (uint32_t)maxViewfinderSize.width ||
1005                                   newStream->height > (uint32_t)maxViewfinderSize.height)) {
1006                       stream_config_info.postprocess_mask[i] = commonFeatureMask;
1007                   } else {
1008                       stream_config_info.postprocess_mask[i] = CAM_QCOM_FEATURE_NONE;
1009                   }
1010               }
1011               if (m_bIs4KVideo) {
1012                   stream_config_info.stream_sizes[i].width = videoWidth;
1013                   stream_config_info.stream_sizes[i].height = videoHeight;
1014               }
1015               break;
1016            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1017            case HAL_PIXEL_FORMAT_RAW16:
1018            case HAL_PIXEL_FORMAT_RAW10:
1019               stream_config_info.type[i] = CAM_STREAM_TYPE_RAW;
1020               isRawStreamRequested = true;
1021               break;
1022            default:
1023               stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
1024               stream_config_info.postprocess_mask[i] = CAM_QCOM_FEATURE_NONE;
1025               break;
1026            }
1027         }
1028         if (newStream->priv == NULL) {
1029             //New stream, construct channel
1030             switch (newStream->stream_type) {
1031             case CAMERA3_STREAM_INPUT:
1032                 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
1033                 break;
1034             case CAMERA3_STREAM_BIDIRECTIONAL:
1035                 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
1036                     GRALLOC_USAGE_HW_CAMERA_WRITE;
1037                 break;
1038             case CAMERA3_STREAM_OUTPUT:
1039                 /* For video encoding stream, set read/write rarely
1040                  * flag so that they may be set to un-cached */
1041                 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1042                     newStream->usage =
1043                          (GRALLOC_USAGE_SW_READ_RARELY |
1044                          GRALLOC_USAGE_SW_WRITE_RARELY |
1045                          GRALLOC_USAGE_HW_CAMERA_WRITE);
1046                 else
1047                     newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
1048                 break;
1049             default:
1050                 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
1051                 break;
1052             }
1053 
1054             if (newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
1055                     newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
1056                     jpegStream) {
1057                 QCamera3Channel *channel = NULL;
1058                 newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
1059                 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1060                         mCameraHandle->ops, captureResultCb,
1061                         &gCamCapability[mCameraId]->padding_info,
1062                         this,
1063                         newStream,
1064                         (cam_stream_type_t) stream_config_info.type[i],
1065                         stream_config_info.postprocess_mask[i],
1066                         jpegStream->width, jpegStream->height);
1067                     if (channel == NULL) {
1068                         ALOGE("%s: allocation of channel failed", __func__);
1069                         pthread_mutex_unlock(&mMutex);
1070                         return -ENOMEM;
1071                     }
1072                     newStream->priv = channel;
1073             } else if (newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1074                 QCamera3Channel *channel = NULL;
1075                 switch (newStream->format) {
1076                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1077                 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1078                     newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
1079                     channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1080                             mCameraHandle->ops, captureResultCb,
1081                             &gCamCapability[mCameraId]->padding_info,
1082                             this,
1083                             newStream,
1084                             (cam_stream_type_t) stream_config_info.type[i],
1085                             stream_config_info.postprocess_mask[i]);
1086                     if (channel == NULL) {
1087                         ALOGE("%s: allocation of channel failed", __func__);
1088                         pthread_mutex_unlock(&mMutex);
1089                         return -ENOMEM;
1090                     }
1091 
1092                     newStream->priv = channel;
1093                     break;
1094                 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1095                 case HAL_PIXEL_FORMAT_RAW16:
1096                 case HAL_PIXEL_FORMAT_RAW10:
1097                     newStream->max_buffers = QCamera3RawChannel::kMaxBuffers;
1098                     mRawChannel = new QCamera3RawChannel(
1099                             mCameraHandle->camera_handle,
1100                             mCameraHandle->ops, captureResultCb,
1101                             &gCamCapability[mCameraId]->padding_info,
1102                             this, newStream, CAM_QCOM_FEATURE_NONE,
1103                             (newStream->format == HAL_PIXEL_FORMAT_RAW16));
1104                     if (mRawChannel == NULL) {
1105                         ALOGE("%s: allocation of raw channel failed", __func__);
1106                         pthread_mutex_unlock(&mMutex);
1107                         return -ENOMEM;
1108                     }
1109 
1110                     newStream->priv = (QCamera3Channel*)mRawChannel;
1111                     break;
1112                 case HAL_PIXEL_FORMAT_BLOB:
1113                     newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
1114                     mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
1115                             mCameraHandle->ops, captureResultCb,
1116                             &gCamCapability[mCameraId]->padding_info, this, newStream,
1117                             stream_config_info.postprocess_mask[i],
1118                             m_bIs4KVideo, mMetadataChannel);
1119                     if (mPictureChannel == NULL) {
1120                         ALOGE("%s: allocation of channel failed", __func__);
1121                         pthread_mutex_unlock(&mMutex);
1122                         return -ENOMEM;
1123                     }
1124                     newStream->priv = (QCamera3Channel*)mPictureChannel;
1125                     break;
1126 
1127                 default:
1128                     ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
1129                     break;
1130                 }
1131             }
1132 
1133             for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1134                     it != mStreamInfo.end(); it++) {
1135                 if ((*it)->stream == newStream) {
1136                     (*it)->channel = (QCamera3Channel*) newStream->priv;
1137                     break;
1138                 }
1139             }
1140         } else {
1141             // Channel already exists for this stream
1142             // Do nothing for now
1143         }
1144     }
1145 
1146     if (mPictureChannel && m_bIs4KVideo) {
1147         mPictureChannel->overrideYuvSize(videoWidth, videoHeight);
1148     }
1149 
1150     //RAW DUMP channel
1151     if (mEnableRawDump && isRawStreamRequested == false){
1152         cam_dimension_t rawDumpSize;
1153         rawDumpSize = getMaxRawSize(mCameraId);
1154         mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
1155                                   mCameraHandle->ops,
1156                                   rawDumpSize,
1157                                   &gCamCapability[mCameraId]->padding_info,
1158                                   this, CAM_QCOM_FEATURE_NONE);
1159         if (!mRawDumpChannel) {
1160             ALOGE("%s: Raw Dump channel cannot be created", __func__);
1161             pthread_mutex_unlock(&mMutex);
1162             return -ENOMEM;
1163         }
1164     }
1165 
1166 
1167     stream_config_info.num_streams = streamList->num_streams;
1168     if (mSupportChannel) {
1169         stream_config_info.stream_sizes[stream_config_info.num_streams] =
1170                 QCamera3SupportChannel::kDim;
1171         stream_config_info.type[stream_config_info.num_streams] =
1172                 CAM_STREAM_TYPE_CALLBACK;
1173         stream_config_info.num_streams++;
1174     }
1175 
1176     if (mRawDumpChannel) {
1177         cam_dimension_t rawSize;
1178         rawSize = getMaxRawSize(mCameraId);
1179         stream_config_info.stream_sizes[stream_config_info.num_streams] =
1180                 rawSize;
1181         stream_config_info.type[stream_config_info.num_streams] =
1182                 CAM_STREAM_TYPE_RAW;
1183         stream_config_info.num_streams++;
1184     }
1185 
1186     // settings/parameters don't carry over for new configureStreams
1187     int32_t hal_version = CAM_HAL_V3;
1188     memset(mParameters, 0, sizeof(metadata_buffer_t));
1189 
1190     AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1191             sizeof(hal_version), &hal_version);
1192 
1193     AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
1194             sizeof(cam_stream_size_info_t), &stream_config_info);
1195 
1196     int32_t tintless_value = 1;
1197     AddSetParmEntryToBatch(mParameters,CAM_INTF_PARM_TINTLESS,
1198                 sizeof(tintless_value), &tintless_value);
1199 
1200     mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1201 
1202     /* Initialize mPendingRequestsList and mPendingBuffersMap */
1203     mPendingRequestsList.clear();
1204     mPendingFrameDropList.clear();
1205     // Initialize/Reset the pending buffers list
1206     mPendingBuffersMap.num_buffers = 0;
1207     mPendingBuffersMap.mPendingBufferList.clear();
1208     mPendingReprocessResultList.clear();
1209 
1210     mFirstRequest = true;
1211 
1212     //Get min frame duration for this streams configuration
1213     deriveMinFrameDuration();
1214 
1215     pthread_mutex_unlock(&mMutex);
1216     return rc;
1217 }
1218 
1219 /*===========================================================================
1220  * FUNCTION   : validateCaptureRequest
1221  *
1222  * DESCRIPTION: validate a capture request from camera service
1223  *
1224  * PARAMETERS :
1225  *   @request : request from framework to process
1226  *
1227  * RETURN     :
1228  *
1229  *==========================================================================*/
1230 int QCamera3HardwareInterface::validateCaptureRequest(
1231                     camera3_capture_request_t *request)
1232 {
1233     ssize_t idx = 0;
1234     const camera3_stream_buffer_t *b;
1235     CameraMetadata meta;
1236 
1237     /* Sanity check the request */
1238     if (request == NULL) {
1239         ALOGE("%s: NULL capture request", __func__);
1240         return BAD_VALUE;
1241     }
1242 
1243     if (request->settings == NULL && mFirstRequest) {
1244         /* settings cannot be null for the first request */
1245         return BAD_VALUE;
1246     }
1247 
1248     uint32_t frameNumber = request->frame_number;
1249     if (request->input_buffer != NULL &&
1250             request->input_buffer->stream != mInputStream) {
1251         ALOGE("%s: Request %d: Input buffer not from input stream!",
1252                 __FUNCTION__, frameNumber);
1253         return BAD_VALUE;
1254     }
1255     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
1256         ALOGE("%s: Request %d: No output buffers provided!",
1257                 __FUNCTION__, frameNumber);
1258         return BAD_VALUE;
1259     }
1260     if (request->input_buffer != NULL) {
1261         b = request->input_buffer;
1262         QCamera3Channel *channel =
1263             static_cast<QCamera3Channel*>(b->stream->priv);
1264         if (channel == NULL) {
1265             ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
1266                     __func__, frameNumber, (long)idx);
1267             return BAD_VALUE;
1268         }
1269         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1270             ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
1271                     __func__, frameNumber, (long)idx);
1272             return BAD_VALUE;
1273         }
1274         if (b->release_fence != -1) {
1275             ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
1276                     __func__, frameNumber, (long)idx);
1277             return BAD_VALUE;
1278         }
1279         if (b->buffer == NULL) {
1280             ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
1281                     __func__, frameNumber, (long)idx);
1282             return BAD_VALUE;
1283         }
1284     }
1285 
1286     // Validate all buffers
1287     b = request->output_buffers;
1288     do {
1289         QCamera3Channel *channel =
1290                 static_cast<QCamera3Channel*>(b->stream->priv);
1291         if (channel == NULL) {
1292             ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
1293                     __func__, frameNumber, (long)idx);
1294             return BAD_VALUE;
1295         }
1296         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1297             ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
1298                     __func__, frameNumber, (long)idx);
1299             return BAD_VALUE;
1300         }
1301         if (b->release_fence != -1) {
1302             ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
1303                     __func__, frameNumber, (long)idx);
1304             return BAD_VALUE;
1305         }
1306         if (b->buffer == NULL) {
1307             ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
1308                     __func__, frameNumber, (long)idx);
1309             return BAD_VALUE;
1310         }
1311         if (*(b->buffer) == NULL) {
1312             ALOGE("%s: Request %d: Buffer %ld: NULL private handle!",
1313                     __func__, frameNumber, (long)idx);
1314             return BAD_VALUE;
1315         }
1316         idx++;
1317         b = request->output_buffers + idx;
1318     } while (idx < (ssize_t)request->num_output_buffers);
1319 
1320     return NO_ERROR;
1321 }
1322 
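/*
 * Illustrative sketch (not part of the original file): the minimal shape of a
 * request that passes the checks above. "previewStream", "previewHandle" and
 * "previewSettings" are hypothetical framework-side objects used only here.
 *
 *     camera3_stream_buffer_t out = {};
 *     out.stream = previewStream;            // must be a configured stream
 *     out.buffer = &previewHandle;           // non-NULL buffer handle
 *     out.status = CAMERA3_BUFFER_STATUS_OK;
 *     out.acquire_fence = -1;
 *     out.release_fence = -1;                // release fences are rejected
 *
 *     camera3_capture_request_t req = {};
 *     req.frame_number = 0;
 *     req.settings = previewSettings;        // required while mFirstRequest is true
 *     req.num_output_buffers = 1;
 *     req.output_buffers = &out;
 *     // validateCaptureRequest(&req) would return NO_ERROR for this request
 */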
1323 /*===========================================================================
1324  * FUNCTION   : deriveMinFrameDuration
1325  *
1326  * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
1327  *              on currently configured streams.
1328  *
1329  * PARAMETERS : NONE
1330  *
1331  * RETURN     : NONE
1332  *
1333  *==========================================================================*/
1334 void QCamera3HardwareInterface::deriveMinFrameDuration()
1335 {
1336     int32_t maxJpegDim, maxProcessedDim, maxRawDim;
1337 
1338     maxJpegDim = 0;
1339     maxProcessedDim = 0;
1340     maxRawDim = 0;
1341 
1342     // Figure out maximum jpeg, processed, and raw dimensions
1343     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1344         it != mStreamInfo.end(); it++) {
1345 
1346         // Input stream doesn't have valid stream_type
1347         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
1348             continue;
1349 
1350         int32_t dimension = (*it)->stream->width * (*it)->stream->height;
1351         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1352             if (dimension > maxJpegDim)
1353                 maxJpegDim = dimension;
1354         } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
1355                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
1356                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
1357             if (dimension > maxRawDim)
1358                 maxRawDim = dimension;
1359         } else {
1360             if (dimension > maxProcessedDim)
1361                 maxProcessedDim = dimension;
1362         }
1363     }
1364 
1365     //Assume all jpeg dimensions are in processed dimensions.
1366     if (maxJpegDim > maxProcessedDim)
1367         maxProcessedDim = maxJpegDim;
1368     //Find the smallest raw dimension that is greater than or equal to the jpeg/processed dimension
1369     if (maxProcessedDim > maxRawDim) {
1370         maxRawDim = INT32_MAX;
1371         for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
1372             i++) {
1373 
1374             int32_t dimension =
1375                 gCamCapability[mCameraId]->raw_dim[i].width *
1376                 gCamCapability[mCameraId]->raw_dim[i].height;
1377 
1378             if (dimension >= maxProcessedDim && dimension < maxRawDim)
1379                 maxRawDim = dimension;
1380         }
1381     }
1382 
1383     //Find minimum durations for processed, jpeg, and raw
1384     for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
1385             i++) {
1386         if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
1387                 gCamCapability[mCameraId]->raw_dim[i].height) {
1388             mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
1389             break;
1390         }
1391     }
1392     for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1393         if (maxProcessedDim ==
1394             gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
1395             gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
1396             mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
1397             mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
1398             break;
1399         }
1400     }
1401 }
1402 
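/*
 * Worked example (illustrative only, with made-up capability tables): with a
 * 1920x1080 processed stream and a 4160x3120 BLOB stream configured, the jpeg
 * dimension exceeds the processed one, so maxProcessedDim is promoted to
 * 4160x3120.  The smallest entry in raw_dim[] that is at least that large is
 * selected as maxRawDim, and the matching picture_min_duration[] /
 * raw_min_duration[] entries become mMinProcessedFrameDuration,
 * mMinJpegFrameDuration and mMinRawFrameDuration.
 */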
1403 /*===========================================================================
1404  * FUNCTION   : getMinFrameDuration
1405  *
1406  * DESCRIPTION: get minimum frame duration based on the derived per-stream
1407  *              minimum durations and the streams in the current request.
1408  *
1409  * PARAMETERS : @request: request sent by the framework
1410  *
1411  * RETURN     : min frame duration for a particular request
1412  *
1413  *==========================================================================*/
1414 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
1415 {
1416     bool hasJpegStream = false;
1417     bool hasRawStream = false;
1418     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
1419         const camera3_stream_t *stream = request->output_buffers[i].stream;
1420         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
1421             hasJpegStream = true;
1422         else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
1423                 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
1424                 stream->format == HAL_PIXEL_FORMAT_RAW16)
1425             hasRawStream = true;
1426     }
1427 
1428     if (!hasJpegStream)
1429         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
1430     else
1431         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
1432 }
1433 
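/*
 * In effect (illustrative numbers): a request containing a BLOB buffer is
 * floored at max(mMinRawFrameDuration, mMinProcessedFrameDuration,
 * mMinJpegFrameDuration), e.g. max(33.3ms, 33.3ms, 50ms) = 50ms, while a
 * request without a BLOB buffer is floored at max(raw, processed) = 33.3ms.
 */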
1434 /*===========================================================================
1435  * FUNCTION   : handlePendingReprocResults
1436  *
1437  * DESCRIPTION: check and notify on any pending reprocess results
1438  *
1439  * PARAMETERS :
1440  *   @frame_number   : Pending request frame number
1441  *
1442  * RETURN     : int32_t type of status
1443  *              NO_ERROR  -- success
1444  *              non-zero failure code
1445  *==========================================================================*/
1446 int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
1447 {
1448     for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
1449             j != mPendingReprocessResultList.end(); j++) {
1450         if (j->frame_number == frame_number) {
1451             mCallbackOps->notify(mCallbackOps, &j->notify_msg);
1452 
1453             CDBG("%s: Delayed reprocess notify %d", __func__,
1454                     frame_number);
1455 
1456             for (List<PendingRequestInfo>::iterator k = mPendingRequestsList.begin();
1457                 k != mPendingRequestsList.end(); k++) {
1458 
1459                 if (k->frame_number == j->frame_number) {
1460                     CDBG("%s: Found reprocess frame number %d in pending reprocess List "
1461                             "Take it out!!", __func__,
1462                             k->frame_number);
1463 
1464                     camera3_capture_result result;
1465                     memset(&result, 0, sizeof(camera3_capture_result));
1466                     result.frame_number = frame_number;
1467                     result.num_output_buffers = 1;
1468                     result.output_buffers =  &j->buffer;
1469                     result.input_buffer = k->input_buffer;
1470                     result.result = k->settings;
1471                     result.partial_result = PARTIAL_RESULT_COUNT;
1472                     mCallbackOps->process_capture_result(mCallbackOps, &result);
1473 
1474                     mPendingRequestsList.erase(k);
1475                     mPendingRequest--;
1476                     break;
1477                 }
1478             }
1479             mPendingReprocessResultList.erase(j);
1480             break;
1481         }
1482     }
1483     return NO_ERROR;
1484 }
1485 
1486 /*===========================================================================
1487  * FUNCTION   : handleMetadataWithLock
1488  *
1489  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
1490  *
1491  * PARAMETERS : @metadata_buf: metadata buffer
1492  *
1493  * RETURN     : None
1494  *
1495  *==========================================================================*/
1496 void QCamera3HardwareInterface::handleMetadataWithLock(
1497     mm_camera_super_buf_t *metadata_buf)
1498 {
1499     ATRACE_CALL();
1500     metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1501     int32_t frame_number_valid = *(int32_t *)
1502         POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1503     uint32_t frame_number = *(uint32_t *)
1504         POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
1505     nsecs_t capture_time = *(int64_t *)
1506         POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1507     cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
1508         POINTER_OF_META(CAM_INTF_META_FRAME_DROPPED, metadata);
1509     camera3_notify_msg_t notify_msg;
1510 
1511     int32_t urgent_frame_number_valid = *(int32_t *)
1512         POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
1513     uint32_t urgent_frame_number = *(uint32_t *)
1514         POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
1515 
1516     if (urgent_frame_number_valid) {
1517         CDBG("%s: valid urgent frame_number = %d, capture_time = %lld",
1518           __func__, urgent_frame_number, capture_time);
1519 
1520         //Received an urgent frame number; handle it
1521         //using partial results
1522         for (List<PendingRequestInfo>::iterator i =
1523             mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
1524             CDBG("%s: Iterator Frame = %d urgent frame = %d",
1525                 __func__, i->frame_number, urgent_frame_number);
1526 
1527             if (i->frame_number < urgent_frame_number &&
1528                 i->partial_result_cnt == 0) {
1529                 ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
1530                     __func__, i->frame_number);
1531             }
1532 
1533             if (i->frame_number == urgent_frame_number &&
1534                      i->bUrgentReceived == 0) {
1535 
1536                 camera3_capture_result_t result;
1537                 memset(&result, 0, sizeof(camera3_capture_result_t));
1538 
1539                 i->partial_result_cnt++;
1540                 i->bUrgentReceived = 1;
1541                 // Extract 3A metadata
1542                 result.result =
1543                     translateCbUrgentMetadataToResultMetadata(metadata);
1544                 // Populate metadata result
1545                 result.frame_number = urgent_frame_number;
1546                 result.num_output_buffers = 0;
1547                 result.output_buffers = NULL;
1548                 result.partial_result = i->partial_result_cnt;
1549 
1550                 mCallbackOps->process_capture_result(mCallbackOps, &result);
1551                 CDBG("%s: urgent frame_number = %d, capture_time = %lld",
1552                      __func__, result.frame_number, capture_time);
1553                 free_camera_metadata((camera_metadata_t *)result.result);
1554                 break;
1555             }
1556         }
1557     }
1558 
1559     if (!frame_number_valid) {
1560         CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
1561         mMetadataChannel->bufDone(metadata_buf);
1562         free(metadata_buf);
1563         goto done_metadata;
1564     }
1565     CDBG("%s: valid frame_number = %d, capture_time = %lld", __func__,
1566             frame_number, capture_time);
1567 
1568     // Go through the pending requests info and send shutter/results to frameworks
1569     for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1570         i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1571         camera3_capture_result_t result;
1572         memset(&result, 0, sizeof(camera3_capture_result_t));
1573 
1574         CDBG("%s: frame_number in the list is %d", __func__, i->frame_number);
1575         i->partial_result_cnt++;
1576         result.partial_result = i->partial_result_cnt;
1577 
1578         // Flush out all entries with frame numbers less than or equal to this one.
1579         mPendingRequest--;
1580 
1581         // Check whether any stream buffer corresponding to this request was dropped.
1582         // If dropped, then send the ERROR_BUFFER for the corresponding stream
1583         if (cam_frame_drop.frame_dropped) {
1584             camera3_notify_msg_t notify_msg;
1585             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1586                     j != i->buffers.end(); j++) {
1587                 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1588                 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
1589                 for (uint32_t k = 0; k < cam_frame_drop.cam_stream_ID.num_streams; k++) {
1590                    if (streamID == cam_frame_drop.cam_stream_ID.streamID[k]) {
1591                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
1592                        CDBG("%s: Start of reporting error frame#=%d, streamID=%d",
1593                               __func__, i->frame_number, streamID);
1594                        notify_msg.type = CAMERA3_MSG_ERROR;
1595                        notify_msg.message.error.frame_number = i->frame_number;
1596                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
1597                        notify_msg.message.error.error_stream = j->stream;
1598                        mCallbackOps->notify(mCallbackOps, &notify_msg);
1599                        CDBG("%s: End of reporting error frame#=%d, streamID=%d",
1600                               __func__, i->frame_number, streamID);
1601                        PendingFrameDropInfo PendingFrameDrop;
1602                        PendingFrameDrop.frame_number=i->frame_number;
1603                        PendingFrameDrop.stream_ID = streamID;
1604                        // Add the Frame drop info to mPendingFrameDropList
1605                        mPendingFrameDropList.push_back(PendingFrameDrop);
1606                    }
1607                 }
1608             }
1609         }
1610 
1611         // For frames whose metadata was dropped, send dummy metadata with the
1612         // already filled buffers; for the current frame, send valid metadata.
1613         if (i->frame_number < frame_number) {
1614             camera3_notify_msg_t notify_msg;
1615             notify_msg.type = CAMERA3_MSG_SHUTTER;
1616             notify_msg.message.shutter.frame_number = i->frame_number;
1617             notify_msg.message.shutter.timestamp = capture_time -
1618                     (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
1619             mCallbackOps->notify(mCallbackOps, &notify_msg);
1620             i->timestamp = notify_msg.message.shutter.timestamp;
1621             CDBG("%s: Support notification !!!! notify frame_number = %d, capture_time = %lld",
1622                     __func__, i->frame_number, notify_msg.message.shutter.timestamp);
1623 
1624             CameraMetadata dummyMetadata;
1625             dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1626                     &i->timestamp, 1);
1627             dummyMetadata.update(ANDROID_REQUEST_ID,
1628                     &(i->request_id), 1);
1629             result.result = dummyMetadata.release();
1630         } else {
1631 
1632             // Send shutter notify to frameworks
1633             notify_msg.type = CAMERA3_MSG_SHUTTER;
1634             notify_msg.message.shutter.frame_number = i->frame_number;
1635             notify_msg.message.shutter.timestamp = capture_time;
1636             mCallbackOps->notify(mCallbackOps, &notify_msg);
1637 
1638             i->timestamp = capture_time;
1639 
1640             result.result = translateFromHalMetadata(metadata,
1641                     i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
1642                     i->capture_intent);
1643 
1644             if (i->blob_request) {
1645                 {
1646                     //Dump tuning metadata if enabled and available
1647                     char prop[PROPERTY_VALUE_MAX];
1648                     memset(prop, 0, sizeof(prop));
1649                     property_get("persist.camera.dumpmetadata", prop, "0");
1650                     int32_t enabled = atoi(prop);
1651                     if (enabled && metadata->is_tuning_params_valid) {
1652                         dumpMetadataToFile(metadata->tuning_params,
1653                                mMetaFrameCount,
1654                                enabled,
1655                                "Snapshot",
1656                                frame_number);
1657                     }
1658                 }
1659 
1660 
1661                 mPictureChannel->queueReprocMetadata(metadata_buf);
1662             } else {
1663                 // Return metadata buffer
1664                 mMetadataChannel->bufDone(metadata_buf);
1665                 free(metadata_buf);
1666             }
1667         }
1668         if (!result.result) {
1669             ALOGE("%s: metadata is NULL", __func__);
1670         }
1671         result.frame_number = i->frame_number;
1672         result.num_output_buffers = 0;
1673         result.output_buffers = NULL;
1674         for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1675                     j != i->buffers.end(); j++) {
1676             if (j->buffer) {
1677                 result.num_output_buffers++;
1678             }
1679         }
1680 
1681         if (result.num_output_buffers > 0) {
1682             camera3_stream_buffer_t *result_buffers =
1683                 new camera3_stream_buffer_t[result.num_output_buffers];
1684             if (!result_buffers) {
1685                 ALOGE("%s: Fatal error: out of memory", __func__);
1686             }
1687             size_t result_buffers_idx = 0;
1688             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1689                     j != i->buffers.end(); j++) {
1690                 if (j->buffer) {
1691                     for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1692                             m != mPendingFrameDropList.end(); m++) {
1693                         QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
1694                         uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
1695                         if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
1696                             j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1697                             CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
1698                                   __func__, frame_number, streamID);
1699                             m = mPendingFrameDropList.erase(m);
1700                             break;
1701                         }
1702                     }
1703 
1704                     for (List<PendingBufferInfo>::iterator k =
1705                       mPendingBuffersMap.mPendingBufferList.begin();
1706                       k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
1707                       if (k->buffer == j->buffer->buffer) {
1708                         CDBG("%s: Found buffer %p in pending buffer List "
1709                               "for frame %d, Take it out!!", __func__,
1710                                k->buffer, k->frame_number);
1711                         mPendingBuffersMap.num_buffers--;
1712                         k = mPendingBuffersMap.mPendingBufferList.erase(k);
1713                         break;
1714                       }
1715                     }
1716 
1717                     result_buffers[result_buffers_idx++] = *(j->buffer);
1718                     free(j->buffer);
1719                     j->buffer = NULL;
1720                 }
1721             }
1722             result.output_buffers = result_buffers;
1723             mCallbackOps->process_capture_result(mCallbackOps, &result);
1724             CDBG("%s: meta frame_number = %d, capture_time = %lld",
1725                     __func__, result.frame_number, i->timestamp);
1726             free_camera_metadata((camera_metadata_t *)result.result);
1727             delete[] result_buffers;
1728         } else {
1729             mCallbackOps->process_capture_result(mCallbackOps, &result);
1730             CDBG("%s: meta frame_number = %d, capture_time = %lld",
1731                         __func__, result.frame_number, i->timestamp);
1732             free_camera_metadata((camera_metadata_t *)result.result);
1733         }
1734         // erase the element from the list
1735         i = mPendingRequestsList.erase(i);
1736 
1737         if (!mPendingReprocessResultList.empty()) {
1738             handlePendingReprocResults(frame_number + 1);
1739         }
1740     }
1741 
1742 done_metadata:
1743     for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1744         i != mPendingRequestsList.end() ;i++) {
1745         i->pipeline_depth++;
1746     }
1747     unblockRequestIfNecessary();
1748 
1749 }
1750 
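/*
 * Flow sketch (summary of the function above, for reference): for every
 * metadata buffer the callback thread
 *   1) sends a partial (3A) result for a matching urgent frame number,
 *   2) walks mPendingRequestsList up to the reported frame number, emitting
 *      CAMERA3_MSG_ERROR_BUFFER notifications for dropped streams, shutter
 *      notifications, and capture results with any buffers already returned,
 *   3) bumps pipeline_depth on the requests still pending and signals
 *      process_capture_request via unblockRequestIfNecessary().
 */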
1751 /*===========================================================================
1752  * FUNCTION   : handleBufferWithLock
1753  *
1754  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
1755  *
1756  * PARAMETERS : @buffer: image buffer for the callback
1757  *              @frame_number: frame number of the image buffer
1758  *
1759  * RETURN     : None
1760  *
1761  *==========================================================================*/
1762 void QCamera3HardwareInterface::handleBufferWithLock(
1763     camera3_stream_buffer_t *buffer, uint32_t frame_number)
1764 {
1765     ATRACE_CALL();
1766     // If the frame number doesn't exist in the pending request list,
1767     // directly send the buffer to the frameworks, and update pending buffers map
1768     // Otherwise, book-keep the buffer.
1769     List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1770     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1771         i++;
1772     }
1773     if (i == mPendingRequestsList.end()) {
1774         // Verify that all pending requests' frame_numbers are greater
1775         for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1776                 j != mPendingRequestsList.end(); j++) {
1777             if (j->frame_number < frame_number) {
1778                 ALOGE("%s: Error: pending frame number %d is smaller than %d",
1779                         __func__, j->frame_number, frame_number);
1780             }
1781         }
1782         camera3_capture_result_t result;
1783         memset(&result, 0, sizeof(camera3_capture_result_t));
1784         result.result = NULL;
1785         result.frame_number = frame_number;
1786         result.num_output_buffers = 1;
1787         result.partial_result = 0;
1788         for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1789                 m != mPendingFrameDropList.end(); m++) {
1790             QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
1791             uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
1792             if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
1793                 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1794                 CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
1795                         __func__, frame_number, streamID);
1796                 m = mPendingFrameDropList.erase(m);
1797                 break;
1798             }
1799         }
1800         result.output_buffers = buffer;
1801         CDBG("%s: result frame_number = %d, buffer = %p",
1802                 __func__, frame_number, buffer->buffer);
1803 
1804         for (List<PendingBufferInfo>::iterator k =
1805                 mPendingBuffersMap.mPendingBufferList.begin();
1806                 k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
1807             if (k->buffer == buffer->buffer) {
1808                 CDBG("%s: Found Frame buffer, take it out from list",
1809                         __func__);
1810 
1811                 mPendingBuffersMap.num_buffers--;
1812                 k = mPendingBuffersMap.mPendingBufferList.erase(k);
1813                 break;
1814             }
1815         }
1816         CDBG("%s: mPendingBuffersMap.num_buffers = %d",
1817             __func__, mPendingBuffersMap.num_buffers);
1818 
1819         mCallbackOps->process_capture_result(mCallbackOps, &result);
1820     } else {
1821         if (i->input_buffer) {
1822             CameraMetadata settings;
1823             camera3_notify_msg_t notify_msg;
1824             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
1825             nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
1826             if(i->settings) {
1827                 settings = i->settings;
1828                 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
1829                     capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
1830                 } else {
1831                     ALOGE("%s: No timestamp in input settings! Using current one.",
1832                             __func__);
1833                 }
1834             } else {
1835                 ALOGE("%s: Input settings missing!", __func__);
1836             }
1837 
1838             notify_msg.type = CAMERA3_MSG_SHUTTER;
1839             notify_msg.message.shutter.frame_number = frame_number;
1840             notify_msg.message.shutter.timestamp = capture_time;
1841 
1842             sp<Fence> releaseFence = new Fence(i->input_buffer->release_fence);
1843             int32_t rc = releaseFence->wait(Fence::TIMEOUT_NEVER);
1844             if (rc != OK) {
1845                 ALOGE("%s: input buffer fence wait failed %d", __func__, rc);
1846             }
1847 
1848             for (List<PendingBufferInfo>::iterator k =
1849                     mPendingBuffersMap.mPendingBufferList.begin();
1850                     k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
1851                 if (k->buffer == buffer->buffer) {
1852                     CDBG("%s: Found Frame buffer, take it out from list",
1853                             __func__);
1854 
1855                     mPendingBuffersMap.num_buffers--;
1856                     k = mPendingBuffersMap.mPendingBufferList.erase(k);
1857                     break;
1858                 }
1859             }
1860             CDBG("%s: mPendingBuffersMap.num_buffers = %d",
1861                 __func__, mPendingBuffersMap.num_buffers);
1862 
1863             bool notifyNow = true;
1864             for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1865                     j != mPendingRequestsList.end(); j++) {
1866                 if (j->frame_number < frame_number) {
1867                     notifyNow = false;
1868                     break;
1869                 }
1870             }
1871 
1872             if (notifyNow) {
1873                 camera3_capture_result result;
1874                 memset(&result, 0, sizeof(camera3_capture_result));
1875                 result.frame_number = frame_number;
1876                 result.result = i->settings;
1877                 result.input_buffer = i->input_buffer;
1878                 result.num_output_buffers = 1;
1879                 result.output_buffers = buffer;
1880                 result.partial_result = PARTIAL_RESULT_COUNT;
1881 
1882                 mCallbackOps->notify(mCallbackOps, &notify_msg);
1883                 mCallbackOps->process_capture_result(mCallbackOps, &result);
1884                 CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
1885                 i = mPendingRequestsList.erase(i);
1886                 mPendingRequest--;
1887             } else {
1888                 // Cache reprocess result for later
1889                 PendingReprocessResult pendingResult;
1890                 memset(&pendingResult, 0, sizeof(PendingReprocessResult));
1891                 pendingResult.notify_msg = notify_msg;
1892                 pendingResult.buffer = *buffer;
1893                 pendingResult.frame_number = frame_number;
1894                 mPendingReprocessResultList.push_back(pendingResult);
1895                 CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
1896             }
1897         } else {
1898             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1899                 j != i->buffers.end(); j++) {
1900                 if (j->stream == buffer->stream) {
1901                     if (j->buffer != NULL) {
1902                         ALOGE("%s: Error: buffer is already set", __func__);
1903                     } else {
1904                         j->buffer = (camera3_stream_buffer_t *)malloc(
1905                             sizeof(camera3_stream_buffer_t));
1906                         *(j->buffer) = *buffer;
1907                         CDBG("%s: cache buffer %p at result frame_number %d",
1908                             __func__, buffer, frame_number);
1909                     }
1910                 }
1911             }
1912         }
1913     }
1914 }
1915 
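/*
 * Flow sketch (summary of the function above, for reference): a returned image
 * buffer is either (a) sent to the framework immediately when its request is
 * no longer in mPendingRequestsList (its metadata was already delivered), (b)
 * completed as a reprocess result once the input buffer's release fence has
 * signalled, possibly cached in mPendingReprocessResultList until older frames
 * finish, or (c) cached in the matching RequestedBufferInfo so that
 * handleMetadataWithLock() can send it together with the metadata.
 */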
1916 /*===========================================================================
1917  * FUNCTION   : unblockRequestIfNecessary
1918  *
1919  * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
1920  *              that mMutex is held when this function is called.
1921  *
1922  * PARAMETERS : None
1923  *
1924  * RETURN     : None
1925  *
1926  *==========================================================================*/
1927 void QCamera3HardwareInterface::unblockRequestIfNecessary()
1928 {
1929    // Unblock process_capture_request
1930    pthread_cond_signal(&mRequestCond);
1931 }
1932 
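/*
 * Pairing sketch (illustrative): processCaptureRequest() blocks on mRequestCond
 * while mPendingRequest >= MIN_INFLIGHT_REQUESTS; the metadata and flush paths
 * call this helper (with mMutex held) after completing a request so that a
 * waiting process_capture_request thread can re-check the in-flight count.
 */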
1933 /*===========================================================================
1934  * FUNCTION   : processCaptureRequest
1935  *
1936  * DESCRIPTION: process a capture request from camera service
1937  *
1938  * PARAMETERS :
1939  *   @request : request from framework to process
1940  *
1941  * RETURN     : NO_ERROR on success; error code otherwise
1942  *
1943  *==========================================================================*/
1944 int QCamera3HardwareInterface::processCaptureRequest(
1945                     camera3_capture_request_t *request)
1946 {
1947     ATRACE_CALL();
1948     int rc = NO_ERROR;
1949     int32_t request_id;
1950     CameraMetadata meta;
1951 
1952     pthread_mutex_lock(&mMutex);
1953 
1954     rc = validateCaptureRequest(request);
1955     if (rc != NO_ERROR) {
1956         ALOGE("%s: incoming request is not valid", __func__);
1957         pthread_mutex_unlock(&mMutex);
1958         return rc;
1959     }
1960 
1961     meta = request->settings;
1962 
1963     // For first capture request, send capture intent, and
1964     // stream on all streams
1965     if (mFirstRequest) {
1966 
1967          /* get eis information for stream configuration */
1968         cam_is_type_t is_type;
1969         char is_type_value[PROPERTY_VALUE_MAX];
1970         property_get("camera.is_type", is_type_value, "0");
1971         is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
1972 
1973         if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1974             int32_t hal_version = CAM_HAL_V3;
1975             uint8_t captureIntent =
1976                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1977             mCaptureIntent = captureIntent;
1978             memset(mParameters, 0, sizeof(parm_buffer_t));
1979             AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1980                 sizeof(hal_version), &hal_version);
1981             AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1982                 sizeof(captureIntent), &captureIntent);
1983         }
1984 
1985         //If EIS is enabled, turn it on for video
1986         //no EIS for the still-camera use case, front camcorder, or 4K video
1987         bool setEis = mEisEnable && (gCamCapability[mCameraId]->position == CAM_POSITION_BACK &&
1988             (mCaptureIntent ==  CAMERA3_TEMPLATE_VIDEO_RECORD ||
1989              mCaptureIntent == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT));
1990         int32_t vsMode;
1991         vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
1992         rc = AddSetParmEntryToBatch(mParameters,
1993                 CAM_INTF_PARM_DIS_ENABLE,
1994                 sizeof(vsMode), &vsMode);
1995 
1996         //IS type will be 0 unless EIS is supported. If EIS is supported
1997         //it could either be 1 or 4 depending on the stream and video size
1998         if (setEis){
1999             if (m_bIs4KVideo) {
2000                 is_type = IS_TYPE_DIS;
2001             } else {
2002                 is_type = IS_TYPE_EIS_2_0;
2003             }
2004         }
2005 
2006         for (size_t i = 0; i < request->num_output_buffers; i++) {
2007             const camera3_stream_buffer_t& output = request->output_buffers[i];
2008             QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
2009             /*for livesnapshot stream is_type will be DIS*/
2010             if (setEis && output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
2011                 rc = channel->registerBuffer(output.buffer,
2012                         IS_TYPE_DIS, mCaptureIntent);
2013             } else {
2014                 rc = channel->registerBuffer(output.buffer,
2015                         is_type, mCaptureIntent);
2016             }
2017             if (rc < 0) {
2018                 ALOGE("%s: registerBuffer failed",
2019                         __func__);
2020                 pthread_mutex_unlock(&mMutex);
2021                 return -ENODEV;
2022             }
2023         }
2024 
2025         /*set the capture intent, hal version and dis enable parameters to the backend*/
2026         mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
2027                     mParameters);
2028 
2029 
2030         //First initialize all streams
2031         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2032             it != mStreamInfo.end(); it++) {
2033             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2034             if (setEis && (*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2035                 rc = channel->initialize(IS_TYPE_DIS, mCaptureIntent);
2036             } else {
2037                 rc = channel->initialize(is_type, mCaptureIntent);
2038             }
2039             if (NO_ERROR != rc) {
2040                 ALOGE("%s : Channel initialization failed %d", __func__, rc);
2041                 pthread_mutex_unlock(&mMutex);
2042                 return rc;
2043             }
2044         }
2045 
2046         if (mRawDumpChannel) {
2047             rc = mRawDumpChannel->initialize(is_type, mCaptureIntent);
2048             if (rc != NO_ERROR) {
2049                 ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
2050                 pthread_mutex_unlock(&mMutex);
2051                 return rc;
2052             }
2053         }
2054         if (mSupportChannel) {
2055             rc = mSupportChannel->initialize(is_type, mCaptureIntent);
2056             if (rc < 0) {
2057                 ALOGE("%s: Support channel initialization failed", __func__);
2058                 pthread_mutex_unlock(&mMutex);
2059                 return rc;
2060             }
2061         }
2062 
2063         //Then start them.
2064         CDBG_HIGH("%s: Start META Channel", __func__);
2065         mMetadataChannel->start();
2066 
2067         if (mSupportChannel) {
2068             rc = mSupportChannel->start();
2069             if (rc < 0) {
2070                 ALOGE("%s: Support channel start failed", __func__);
2071                 mMetadataChannel->stop();
2072                 pthread_mutex_unlock(&mMutex);
2073                 return rc;
2074             }
2075         }
2076         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2077             it != mStreamInfo.end(); it++) {
2078             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2079             CDBG_HIGH("%s: Start Regular Channel mask=%d", __func__, channel->getStreamTypeMask());
2080             channel->start();
2081         }
2082 
2083         if (mRawDumpChannel) {
2084             CDBG("%s: Starting raw dump stream",__func__);
2085             rc = mRawDumpChannel->start();
2086             if (rc != NO_ERROR) {
2087                 ALOGE("%s: Error Starting Raw Dump Channel", __func__);
2088                 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2089                       it != mStreamInfo.end(); it++) {
2090                     QCamera3Channel *channel =
2091                         (QCamera3Channel *)(*it)->stream->priv;
2092                     ALOGE("%s: Stopping Regular Channel mask=%d", __func__,
2093                         channel->getStreamTypeMask());
2094                     channel->stop();
2095                 }
2096                 if (mSupportChannel)
2097                     mSupportChannel->stop();
2098                 mMetadataChannel->stop();
2099                 pthread_mutex_unlock(&mMutex);
2100                 return rc;
2101             }
2102         }
2103         mWokenUpByDaemon = false;
2104         mPendingRequest = 0;
2105     }
2106 
2107     uint32_t frameNumber = request->frame_number;
2108     cam_stream_ID_t streamID;
2109 
2110     if (meta.exists(ANDROID_REQUEST_ID)) {
2111         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
2112         mCurrentRequestId = request_id;
2113         CDBG("%s: Received request with id: %d",__func__, request_id);
2114     } else if (mFirstRequest || mCurrentRequestId == -1){
2115         ALOGE("%s: Unable to find request id field, \
2116                 & no previous id available", __func__);
2117         pthread_mutex_unlock(&mMutex); return NAME_NOT_FOUND;
2118     } else {
2119         CDBG("%s: Re-using old request id", __func__);
2120         request_id = mCurrentRequestId;
2121     }
2122 
2123     CDBG("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
2124                                     __func__, __LINE__,
2125                                     request->num_output_buffers,
2126                                     request->input_buffer,
2127                                     frameNumber);
2128     // Acquire all request buffers first
2129     streamID.num_streams = 0;
2130     int blob_request = 0;
2131     uint32_t snapshotStreamId = 0;
2132     for (size_t i = 0; i < request->num_output_buffers; i++) {
2133         const camera3_stream_buffer_t& output = request->output_buffers[i];
2134         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
2135         sp<Fence> acquireFence = new Fence(output.acquire_fence);
2136 
2137         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
2138             //Call function to store local copy of jpeg data for encode params.
2139             blob_request = 1;
2140             snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
2141         }
2142 
2143         rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
2144         if (rc != OK) {
2145             ALOGE("%s: fence wait failed %d", __func__, rc);
2146             pthread_mutex_unlock(&mMutex);
2147             return rc;
2148         }
2149 
2150         streamID.streamID[streamID.num_streams] =
2151             channel->getStreamID(channel->getStreamTypeMask());
2152         streamID.num_streams++;
2153 
2154 
2155     }
2156 
2157     if (blob_request && mRawDumpChannel) {
2158         CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
2159         streamID.streamID[streamID.num_streams] =
2160             mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
2161         streamID.num_streams++;
2162     }
2163 
2164     if(request->input_buffer == NULL) {
2165        rc = setFrameParameters(request, streamID, snapshotStreamId);
2166         if (rc < 0) {
2167             ALOGE("%s: fail to set frame parameters", __func__);
2168             pthread_mutex_unlock(&mMutex);
2169             return rc;
2170         }
2171     } else {
2172         sp<Fence> acquireFence = new Fence(request->input_buffer->acquire_fence);
2173 
2174         rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
2175         if (rc != OK) {
2176             ALOGE("%s: input buffer fence wait failed %d", __func__, rc);
2177             pthread_mutex_unlock(&mMutex);
2178             return rc;
2179         }
2180     }
2181 
2182     /* Update pending request list and pending buffers map */
2183     PendingRequestInfo pendingRequest;
2184     pendingRequest.frame_number = frameNumber;
2185     pendingRequest.num_buffers = request->num_output_buffers;
2186     pendingRequest.request_id = request_id;
2187     pendingRequest.blob_request = blob_request;
2188     pendingRequest.bUrgentReceived = 0;
2189 
2190     pendingRequest.input_buffer = request->input_buffer;
2191     pendingRequest.settings = request->settings;
2192     pendingRequest.pipeline_depth = 0;
2193     pendingRequest.partial_result_cnt = 0;
2194     extractJpegMetadata(pendingRequest.jpegMetadata, request);
2195 
2196     //extract capture intent
2197     if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2198         mCaptureIntent =
2199                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2200     }
2201     pendingRequest.capture_intent = mCaptureIntent;
2202 
2203     for (size_t i = 0; i < request->num_output_buffers; i++) {
2204         RequestedBufferInfo requestedBuf;
2205         requestedBuf.stream = request->output_buffers[i].stream;
2206         requestedBuf.buffer = NULL;
2207         pendingRequest.buffers.push_back(requestedBuf);
2208 
2209         // Add the buffer handle to the pending buffers list
2210         PendingBufferInfo bufferInfo;
2211         bufferInfo.frame_number = frameNumber;
2212         bufferInfo.buffer = request->output_buffers[i].buffer;
2213         bufferInfo.stream = request->output_buffers[i].stream;
2214         mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
2215         mPendingBuffersMap.num_buffers++;
2216         CDBG("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
2217           __func__, frameNumber, bufferInfo.buffer, bufferInfo.stream,
2218           bufferInfo.stream->format);
2219     }
2220     CDBG("%s: mPendingBuffersMap.num_buffers = %d",
2221           __func__, mPendingBuffersMap.num_buffers);
2222 
2223     mPendingRequestsList.push_back(pendingRequest);
2224 
2225     if(mFlush) {
2226         pthread_mutex_unlock(&mMutex);
2227         return NO_ERROR;
2228     }
2229 
2230     // Notify metadata channel we receive a request
2231     mMetadataChannel->request(NULL, frameNumber);
2232 
2233     // Call request on other streams
2234     for (size_t i = 0; i < request->num_output_buffers; i++) {
2235         const camera3_stream_buffer_t& output = request->output_buffers[i];
2236         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
2237 
2238         if (channel == NULL) {
2239             ALOGE("%s: invalid channel pointer for stream", __func__);
2240             continue;
2241         }
2242 
2243         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
2244             QCamera3RegularChannel* inputChannel = NULL;
2245             if(request->input_buffer != NULL){
2246 
2247                 //Try to get the internal format
2248                 inputChannel = (QCamera3RegularChannel*)
2249                     request->input_buffer->stream->priv;
2250                 if(inputChannel == NULL ){
2251                     ALOGE("%s: failed to get input channel handle", __func__);
2252                     pthread_mutex_unlock(&mMutex);
2253                     return NO_INIT;
2254                 }
2255                 metadata_buffer_t reproc_meta;
2256                 rc = setReprocParameters(request, &reproc_meta, snapshotStreamId);
2257                 if (NO_ERROR == rc) {
2258                     rc = channel->request(output.buffer, frameNumber,
2259                             request->input_buffer, &reproc_meta);
2260                     if (rc < 0) {
2261                         ALOGE("%s: Fail to request on picture channel", __func__);
2262                         pthread_mutex_unlock(&mMutex);
2263                         return rc;
2264                     }
2265                 } else {
2266                     ALOGE("%s: fail to set reproc parameters", __func__);
2267                     pthread_mutex_unlock(&mMutex);
2268                     return rc;
2269                 }
2270             } else
2271                 rc = channel->request(output.buffer, frameNumber,
2272                             NULL, mParameters);
2273         } else {
2274             CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
2275                 __LINE__, output.buffer, frameNumber);
2276            rc = channel->request(output.buffer, frameNumber);
2277         }
2278         if (rc < 0)
2279             ALOGE("%s: request failed", __func__);
2280     }
2281 
2282     if(request->input_buffer == NULL) {
2283         /*set the parameters to backend*/
2284         mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2285     }
2286 
2287     mFirstRequest = false;
2288     // Added a timed condition wait
2289     struct timespec ts;
2290     uint8_t isValidTimeout = 1;
2291     rc = clock_gettime(CLOCK_REALTIME, &ts);
2292     if (rc < 0) {
2293       isValidTimeout = 0;
2294       ALOGE("%s: Error reading the real time clock!!", __func__);
2295     }
2296     else {
2297       // Set a 5 second timeout for the request to be honored
2298       ts.tv_sec += 5;
2299     }
2300     //Block on conditional variable
2301 
2302     mPendingRequest++;
2303     while (mPendingRequest >= MIN_INFLIGHT_REQUESTS) {
2304         if (!isValidTimeout) {
2305             CDBG("%s: Blocking on conditional wait", __func__);
2306             pthread_cond_wait(&mRequestCond, &mMutex);
2307         }
2308         else {
2309             CDBG("%s: Blocking on timed conditional wait", __func__);
2310             rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
2311             if (rc == ETIMEDOUT) {
2312                 rc = -ENODEV;
2313                 ALOGE("%s: Unblocked on timeout!!!!", __func__);
2314                 break;
2315             }
2316         }
2317         CDBG("%s: Unblocked", __func__);
2318         if (mWokenUpByDaemon) {
2319             mWokenUpByDaemon = false;
2320             if (mPendingRequest < MAX_INFLIGHT_REQUESTS)
2321                 break;
2322         }
2323     }
2324     pthread_mutex_unlock(&mMutex);
2325 
2326     return rc;
2327 }
2328 
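/*
 * Throttling sketch (illustrative, mirrors the tail of the function above):
 * the HAL bounds the number of in-flight requests with a condition variable
 * and a 5 second safety timeout.
 *
 *     struct timespec ts;
 *     clock_gettime(CLOCK_REALTIME, &ts);
 *     ts.tv_sec += 5;                                   // request must be honored within 5s
 *     mPendingRequest++;
 *     while (mPendingRequest >= MIN_INFLIGHT_REQUESTS) {
 *         if (pthread_cond_timedwait(&mRequestCond, &mMutex, &ts) == ETIMEDOUT) {
 *             rc = -ENODEV;                             // stuck pipeline treated as device error
 *             break;
 *         }
 *         if (mWokenUpByDaemon && mPendingRequest < MAX_INFLIGHT_REQUESTS)
 *             break;                                    // daemon wakeup relaxes the bound
 *     }
 */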
2329 /*===========================================================================
2330  * FUNCTION   : dump
2331  *
2332  * DESCRIPTION: dump pending request, pending buffer and frame-drop state
2333  *              of the HAL to the given file descriptor
2334  * PARAMETERS :
2335  *   @fd : file descriptor to write the dump to
2336  *
2337  * RETURN     : None
2338  *==========================================================================*/
2339 void QCamera3HardwareInterface::dump(int fd)
2340 {
2341     pthread_mutex_lock(&mMutex);
2342     dprintf(fd, "\n Camera HAL3 information Begin \n");
2343 
2344     dprintf(fd, "\nNumber of pending requests: %d \n",
2345         mPendingRequestsList.size());
2346     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
2347     dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
2348     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
2349     for(List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
2350         i != mPendingRequestsList.end(); i++) {
2351         dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
2352         i->frame_number, i->num_buffers, i->request_id, i->blob_request,
2353         i->input_buffer);
2354     }
2355     dprintf(fd, "\nPending buffer map: Number of buffers: %d\n",
2356                 mPendingBuffersMap.num_buffers);
2357     dprintf(fd, "-------+-------------\n");
2358     dprintf(fd, " Frame | Stream type \n");
2359     dprintf(fd, "-------+-------------\n");
2360     for(List<PendingBufferInfo>::iterator i =
2361         mPendingBuffersMap.mPendingBufferList.begin();
2362         i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
2363         dprintf(fd, " %5d | %11d \n",
2364             i->frame_number, i->stream->stream_type);
2365     }
2366     dprintf(fd, "-------+-------------\n");
2367 
2368     dprintf(fd, "\nPending frame drop list: %d\n",
2369         mPendingFrameDropList.size());
2370     dprintf(fd, "-------+-----------\n");
2371     dprintf(fd, " Frame | Stream ID \n");
2372     dprintf(fd, "-------+-----------\n");
2373     for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
2374         i != mPendingFrameDropList.end(); i++) {
2375         dprintf(fd, " %5d | %9d \n",
2376             i->frame_number, i->stream_ID);
2377     }
2378     dprintf(fd, "-------+-----------\n");
2379 
2380     dprintf(fd, "\n Camera HAL3 information End \n");
2381     pthread_mutex_unlock(&mMutex);
2382     return;
2383 }
2384 
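/*
 * Sample dump layout (values illustrative only):
 *
 *  Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present
 * -------+-------------------+-------------+----------+---------------------
 *     12 |                 2 |           3 |        0 | 0x0
 */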
2385 /*===========================================================================
2386  * FUNCTION   : flush
2387  *
2388  * DESCRIPTION: stop all channels, return every pending buffer and request
2389  *              to the framework with error status, then restart the channels
2390  * PARAMETERS : None
2391  *
2392  *
2393  * RETURN     : 0 on success
2394  *==========================================================================*/
2395 int QCamera3HardwareInterface::flush()
2396 {
2397     ATRACE_CALL();
2398     unsigned int frameNum = 0;
2399     camera3_notify_msg_t notify_msg;
2400     camera3_capture_result_t result;
2401     camera3_stream_buffer_t *pStream_Buf = NULL;
2402     FlushMap flushMap;
2403 
2404     CDBG("%s: Unblocking Process Capture Request", __func__);
2405     pthread_mutex_lock(&mMutex);
2406     mFlush = true;
2407     pthread_mutex_unlock(&mMutex);
2408 
2409     memset(&result, 0, sizeof(camera3_capture_result_t));
2410 
2411     // Stop the Streams/Channels
2412     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2413         it != mStreamInfo.end(); it++) {
2414         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2415         channel->stop();
2416         (*it)->status = INVALID;
2417     }
2418 
2419     if (mSupportChannel) {
2420         mSupportChannel->stop();
2421     }
2422     if (mRawDumpChannel) {
2423         mRawDumpChannel->stop();
2424     }
2425     if (mMetadataChannel) {
2426         /* If mStreamInfo is not empty, the metadata stream exists */
2427         mMetadataChannel->stop();
2428     }
2429 
2430     // Mutex Lock
2431     pthread_mutex_lock(&mMutex);
2432 
2433     // Unblock process_capture_request
2434     mPendingRequest = 0;
2435     pthread_cond_signal(&mRequestCond);
2436 
2437     List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
2438     if (i != mPendingRequestsList.end()) frameNum = i->frame_number;
2439     CDBG("%s: Oldest frame num on  mPendingRequestsList = %d",
2440       __func__, frameNum);
2441 
2442     // Go through the pending buffers and group them depending
2443     // on frame number
2444     for (List<PendingBufferInfo>::iterator k =
2445             mPendingBuffersMap.mPendingBufferList.begin();
2446             k != mPendingBuffersMap.mPendingBufferList.end();) {
2447 
2448         if (k->frame_number < frameNum) {
2449             ssize_t idx = flushMap.indexOfKey(k->frame_number);
2450             if (idx == NAME_NOT_FOUND) {
2451                 Vector<PendingBufferInfo> pending;
2452                 pending.add(*k);
2453                 flushMap.add(k->frame_number, pending);
2454             } else {
2455                 Vector<PendingBufferInfo> &pending =
2456                         flushMap.editValueFor(k->frame_number);
2457                 pending.add(*k);
2458             }
2459 
2460             mPendingBuffersMap.num_buffers--;
2461             k = mPendingBuffersMap.mPendingBufferList.erase(k);
2462         } else {
2463             k++;
2464         }
2465     }
2466 
2467     for (size_t i = 0; i < flushMap.size(); i++) {
2468         uint32_t frame_number = flushMap.keyAt(i);
2469         const Vector<PendingBufferInfo> &pending = flushMap.valueAt(i);
2470 
2471         // Send an error notify to the framework for each buffer whose
2472         // metadata buffer has already been sent
2473         CDBG("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
2474           __func__, frame_number, pending.size());
2475 
2476         pStream_Buf = new camera3_stream_buffer_t[pending.size()];
2477         if (NULL == pStream_Buf) {
2478             ALOGE("%s: No memory for pending buffers array", __func__);
2479             pthread_mutex_unlock(&mMutex);
2480             return NO_MEMORY;
2481         }
2482 
2483         for (size_t j = 0; j < pending.size(); j++) {
2484             const PendingBufferInfo &info = pending.itemAt(j);
2485             notify_msg.type = CAMERA3_MSG_ERROR;
2486             notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
2487             notify_msg.message.error.error_stream = info.stream;
2488             notify_msg.message.error.frame_number = frame_number;
2489             pStream_Buf[j].acquire_fence = -1;
2490             pStream_Buf[j].release_fence = -1;
2491             pStream_Buf[j].buffer = info.buffer;
2492             pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
2493             pStream_Buf[j].stream = info.stream;
2494             mCallbackOps->notify(mCallbackOps, &notify_msg);
2495             CDBG("%s: notify frame_number = %d stream %p", __func__,
2496                     frame_number, info.stream);
2497         }
2498 
2499         result.result = NULL;
2500         result.frame_number = frame_number;
2501         result.num_output_buffers = pending.size();
2502         result.output_buffers = pStream_Buf;
2503         mCallbackOps->process_capture_result(mCallbackOps, &result);
2504 
2505         delete [] pStream_Buf;
2506     }
2507 
2508     CDBG("%s:Sending ERROR REQUEST for all pending requests", __func__);
2509 
2510     flushMap.clear();
2511     for (List<PendingBufferInfo>::iterator k =
2512             mPendingBuffersMap.mPendingBufferList.begin();
2513             k != mPendingBuffersMap.mPendingBufferList.end();) {
2514         ssize_t idx = flushMap.indexOfKey(k->frame_number);
2515         if (idx == NAME_NOT_FOUND) {
2516             Vector<PendingBufferInfo> pending;
2517             pending.add(*k);
2518             flushMap.add(k->frame_number, pending);
2519         } else {
2520             Vector<PendingBufferInfo> &pending =
2521                     flushMap.editValueFor(k->frame_number);
2522             pending.add(*k);
2523         }
2524 
2525         mPendingBuffersMap.num_buffers--;
2526         k = mPendingBuffersMap.mPendingBufferList.erase(k);
2527     }
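    // Note (added for clarity): the remaining entries (frame numbers >= frameNum,
    // i.e. requests whose metadata has not been sent yet) are grouped the same
    // way, so an ERROR_REQUEST notify plus an error result can be issued per
    // frame in the loop below.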
2528 
2529     // Go through the pending requests info and send error request to framework
2530     for (size_t i = 0; i < flushMap.size(); i++) {
2531         uint32_t frame_number = flushMap.keyAt(i);
2532         const Vector<PendingBufferInfo> &pending = flushMap.valueAt(i);
2533         CDBG("%s:Sending ERROR REQUEST for frame %d",
2534               __func__, frame_number);
2535 
2536         // Send error notify to frameworks
2537         notify_msg.type = CAMERA3_MSG_ERROR;
2538         notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
2539         notify_msg.message.error.error_stream = NULL;
2540         notify_msg.message.error.frame_number = frame_number;
2541         mCallbackOps->notify(mCallbackOps, &notify_msg);
2542 
2543         pStream_Buf = new camera3_stream_buffer_t[pending.size()];
2544         if (NULL == pStream_Buf) {
2545             ALOGE("%s: No memory for pending buffers array", __func__);
2546             pthread_mutex_unlock(&mMutex);
2547             return NO_MEMORY;
2548         }
2549 
2550         for (size_t j = 0; j < pending.size(); j++) {
2551             const PendingBufferInfo &info = pending.itemAt(j);
2552             pStream_Buf[j].acquire_fence = -1;
2553             pStream_Buf[j].release_fence = -1;
2554             pStream_Buf[j].buffer = info.buffer;
2555             pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
2556             pStream_Buf[j].stream = info.stream;
2557         }
2558 
2559         result.num_output_buffers = pending.size();
2560         result.output_buffers = pStream_Buf;
2561         result.result = NULL;
2562         result.frame_number = frame_number;
2563         mCallbackOps->process_capture_result(mCallbackOps, &result);
2564         delete [] pStream_Buf;
2565     }
2566 
2567     /* Reset pending buffer list and requests list */
2568     mPendingRequestsList.clear();
2569     /* Reset pending frame Drop list and requests list */
2570     mPendingFrameDropList.clear();
2571 
2572     flushMap.clear();
2573     mPendingBuffersMap.num_buffers = 0;
2574     mPendingBuffersMap.mPendingBufferList.clear();
2575     mPendingReprocessResultList.clear();
2576     CDBG("%s: Cleared all the pending buffers ", __func__);
2577 
2578     mFlush = false;
2579 
2580     // Start the Streams/Channels
2581     if (mMetadataChannel) {
2582         /* Start the metadata channel first, if one has been configured */
2583         mMetadataChannel->start();
2584     }
2585     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2586         it != mStreamInfo.end(); it++) {
2587         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2588         channel->start();
2589     }
2590     if (mSupportChannel) {
2591         mSupportChannel->start();
2592     }
2593     if (mRawDumpChannel) {
2594         mRawDumpChannel->start();
2595     }
2596 
2597     pthread_mutex_unlock(&mMutex);
2598 
2599     return 0;
2600 }
2601 
2602 /*===========================================================================
2603  * FUNCTION   : captureResultCb
2604  *
2605  * DESCRIPTION: Callback handler for all capture results
2606  *              (stream buffers as well as metadata)
2607  *
2608  * PARAMETERS :
2609  *   @metadata_buf : metadata information from the camera backend
2610  *   @buffer       : actual gralloc buffer to be returned to frameworks;
2611  *                   NULL if metadata. @frame_number : frame number of the request
2612  *
2613  * RETURN     : NONE
2614  *==========================================================================*/
2615 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
2616                 camera3_stream_buffer_t *buffer, uint32_t frame_number)
2617 {
2618     pthread_mutex_lock(&mMutex);
2619 
2620     /* Assume flush() is called before any reprocessing. Send
2621      * notify and result immediately upon receipt of any callback*/
2622     if (mLoopBackResult) {
2623         /* Send notify */
2624         camera3_notify_msg_t notify_msg;
2625         notify_msg.type = CAMERA3_MSG_SHUTTER;
2626         notify_msg.message.shutter.frame_number = mLoopBackResult->frame_number;
2627         notify_msg.message.shutter.timestamp = mLoopBackTimestamp;
2628         mCallbackOps->notify(mCallbackOps, &notify_msg);
2629 
2630         /* Send capture result */
2631         mCallbackOps->process_capture_result(mCallbackOps, mLoopBackResult);
2632         free_camera_metadata((camera_metadata_t *)mLoopBackResult->result);
2633         free(mLoopBackResult);
2634         mLoopBackResult = NULL;
2635     }
2636 
2637     if (metadata_buf)
2638         handleMetadataWithLock(metadata_buf);
2639     else
2640         handleBufferWithLock(buffer, frame_number);
2641     pthread_mutex_unlock(&mMutex);
2642     return;
2643 }
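// Note (added for clarity): mMutex is held for the whole callback, which is
// consistent with the "...WithLock" naming of the helpers invoked above; they
// are expected to run with the caller owning the lock.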
2644 
2645 /*===========================================================================
2646  * FUNCTION   : translateFromHalMetadata
2647  *
2648  * DESCRIPTION: Translate metadata from HAL format into framework-defined result metadata
2649  *
2650  * PARAMETERS :
2651  *   @metadata : metadata information from callback
2652  *   @timestamp: metadata buffer timestamp
2653  *   @request_id: request id
2654  *   @jpegMetadata: additional jpeg metadata
2655  *   @pipeline_depth: request pipeline depth   @capture_intent: capture intent
2656  * RETURN     : camera_metadata_t*
2657  *              metadata in a format specified by fwk
2658  *==========================================================================*/
2659 camera_metadata_t*
2660 QCamera3HardwareInterface::translateFromHalMetadata(
2661                                  metadata_buffer_t *metadata,
2662                                  nsecs_t timestamp,
2663                                  int32_t request_id,
2664                                  const CameraMetadata& jpegMetadata,
2665                                  uint8_t pipeline_depth,
2666                                  uint8_t capture_intent)
2667 {
2668     CameraMetadata camMetadata;
2669     camera_metadata_t* resultMetadata;
2670 
2671     if (jpegMetadata.entryCount())
2672         camMetadata.append(jpegMetadata);
2673 
2674     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
2675     camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
2676     camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
2677     camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
2678 
2679     if (IS_META_AVAILABLE(CAM_INTF_META_FRAME_NUMBER, metadata)) {
2680         int64_t frame_number = *(uint32_t *) POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2681         camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &frame_number, 1);
2682     }
2683 
2684 
2685     if (IS_META_AVAILABLE(CAM_INTF_PARM_FPS_RANGE, metadata)) {
2686         int32_t fps_range[2];
2687         cam_fps_range_t * float_range =
2688           (cam_fps_range_t *)POINTER_OF_PARAM(CAM_INTF_PARM_FPS_RANGE, metadata);
2689         fps_range[0] = (int32_t)float_range->min_fps;
2690         fps_range[1] = (int32_t)float_range->max_fps;
2691         camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
2692                                       fps_range, 2);
2693         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
2694             __func__, fps_range[0], fps_range[1]);
2695     }
2696 
2697 
2698     if (IS_META_AVAILABLE(CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata)) {
2699         int32_t  *expCompensation =
2700           (int32_t *)POINTER_OF_META(CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata);
2701         camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2702                                       expCompensation, 1);
2703     }
2704 
2705     if (IS_META_AVAILABLE(CAM_INTF_PARM_BESTSHOT_MODE, metadata)) {
2706         uint8_t sceneMode =
2707                 *((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_BESTSHOT_MODE, metadata));
2708         uint8_t fwkSceneMode =
2709             (uint8_t)lookupFwkName(SCENE_MODES_MAP,
2710             sizeof(SCENE_MODES_MAP)/
2711             sizeof(SCENE_MODES_MAP[0]), sceneMode);
2712         camMetadata.update(ANDROID_CONTROL_SCENE_MODE,
2713              &fwkSceneMode, 1);
2714     }
2715 
2716     if (IS_META_AVAILABLE(CAM_INTF_PARM_AEC_LOCK, metadata)) {
2717         uint8_t  ae_lock =
2718                 *((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_AEC_LOCK, metadata));
2719         camMetadata.update(ANDROID_CONTROL_AE_LOCK,
2720                 &ae_lock, 1);
2721     }
2722 
2723     if (IS_META_AVAILABLE(CAM_INTF_PARM_AWB_LOCK, metadata)) {
2724         uint8_t awb_lock =
2725                 *((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_AWB_LOCK, metadata));
2726         camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &awb_lock, 1);
2727     }
2728 
2729     if (IS_META_AVAILABLE(CAM_INTF_META_FACE_DETECTION, metadata)){
2730         cam_face_detection_data_t *faceDetectionInfo =
2731             (cam_face_detection_data_t *)POINTER_OF_META(CAM_INTF_META_FACE_DETECTION, metadata);
2732         uint8_t numFaces = faceDetectionInfo->num_faces_detected;
2733         int32_t faceIds[MAX_ROI];
2734         uint8_t faceScores[MAX_ROI];
2735         int32_t faceRectangles[MAX_ROI * 4];
2736         int32_t faceLandmarks[MAX_ROI * 6];
2737         int j = 0, k = 0;
2738         for (int i = 0; i < numFaces; i++) {
2739             faceIds[i] = faceDetectionInfo->faces[i].face_id;
2740             faceScores[i] = faceDetectionInfo->faces[i].score;
2741             convertToRegions(faceDetectionInfo->faces[i].face_boundary,
2742                 faceRectangles+j, -1);
2743             convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
2744             j+= 4;
2745             k+= 6;
2746         }
2747         if (numFaces <= 0) {
2748             memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
2749             memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
2750             memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
2751             memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
2752         }
2753         camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
2754         camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
2755         camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
2756             faceRectangles, numFaces*4);
2757         camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
2758             faceLandmarks, numFaces*6);
2759     }
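    // Note (added for clarity): each detected face contributes 4 ints to
    // ANDROID_STATISTICS_FACE_RECTANGLES ([xmin, ymin, xmax, ymax] via
    // convertToRegions) and 6 ints to ANDROID_STATISTICS_FACE_LANDMARKS
    // (three (x, y) landmark points).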
2760     if (IS_META_AVAILABLE(CAM_INTF_META_COLOR_CORRECT_MODE, metadata)){
2761         uint8_t  *color_correct_mode =
2762             (uint8_t *)POINTER_OF_META(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
2763         camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
2764     }
2765     if (IS_META_AVAILABLE(CAM_INTF_META_EDGE_MODE, metadata)) {
2766         cam_edge_application_t  *edgeApplication =
2767             (cam_edge_application_t *)POINTER_OF_META(CAM_INTF_META_EDGE_MODE, metadata);
2768         uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
2769         camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
2770         camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
2771     }
2772     if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_POWER, metadata)) {
2773         uint8_t  *flashPower =
2774             (uint8_t *)POINTER_OF_META(CAM_INTF_META_FLASH_POWER, metadata);
2775         camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
2776     }
2777     if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_FIRING_TIME, metadata)) {
2778         int64_t  *flashFiringTime =
2779             (int64_t *)POINTER_OF_META(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
2780         camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
2781     }
2782     if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_STATE, metadata)) {
2783         uint8_t  flashState =
2784             *((uint8_t *)POINTER_OF_META(CAM_INTF_META_FLASH_STATE, metadata));
2785         if (!gCamCapability[mCameraId]->flash_available) {
2786             flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
2787         }
2788         camMetadata.update(ANDROID_FLASH_STATE, &flashState, 1);
2789     }
2790     if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_MODE, metadata)){
2791         uint8_t flashMode = *((uint8_t*)
2792             POINTER_OF_META(CAM_INTF_META_FLASH_MODE, metadata));
2793         uint8_t fwk_flashMode = lookupFwkName(FLASH_MODES_MAP,
2794             sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]), flashMode);
2795         camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
2796     }
2797     if (IS_META_AVAILABLE(CAM_INTF_META_HOTPIXEL_MODE, metadata)) {
2798         uint8_t  *hotPixelMode =
2799             (uint8_t *)POINTER_OF_META(CAM_INTF_META_HOTPIXEL_MODE, metadata);
2800         camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
2801     }
2802     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_APERTURE, metadata)){
2803         float  *lensAperture =
2804             (float *)POINTER_OF_META(CAM_INTF_META_LENS_APERTURE, metadata);
2805         camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
2806     }
2807     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_FILTERDENSITY, metadata)) {
2808         float  *filterDensity =
2809             (float *)POINTER_OF_META(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
2810         camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
2811     }
2812     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata)){
2813         float  *focalLength =
2814             (float *)POINTER_OF_META(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
2815         camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
2816     }
2817 
2818     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata)) {
2819         uint8_t  *opticalStab =
2820             (uint8_t *)POINTER_OF_META(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
2821         camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
2822     }
2823     if (IS_META_AVAILABLE(CAM_INTF_PARM_DIS_ENABLE, metadata)) {
2824         uint8_t *vsMode =
2825             (uint8_t *)POINTER_OF_META(CAM_INTF_PARM_DIS_ENABLE, metadata);
2826         camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, vsMode, 1);
2827     }
2828 
2829     if (IS_META_AVAILABLE(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata)) {
2830         uint8_t  *noiseRedMode =
2831             (uint8_t *)POINTER_OF_META(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
2832         camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
2833     }
2834     if (IS_META_AVAILABLE(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata)) {
2835         uint8_t  *noiseRedStrength =
2836             (uint8_t *)POINTER_OF_META(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
2837         camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
2838     }
2839     if (IS_META_AVAILABLE(CAM_INTF_META_SCALER_CROP_REGION, metadata)) {
2840         cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
2841             POINTER_OF_META(CAM_INTF_META_SCALER_CROP_REGION, metadata);
2842         int32_t scalerCropRegion[4];
2843         scalerCropRegion[0] = hScalerCropRegion->left;
2844         scalerCropRegion[1] = hScalerCropRegion->top;
2845         scalerCropRegion[2] = hScalerCropRegion->width;
2846         scalerCropRegion[3] = hScalerCropRegion->height;
2847         camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
2848     }
2849     if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata)){
2850         int64_t  *sensorExpTime =
2851             (int64_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
2852         CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
2853         camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
2854     }
2855     if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata)){
2856         int64_t  *sensorFrameDuration =
2857             (int64_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
2858         CDBG("%s: sensorFrameDuration = %lld", __func__, *sensorFrameDuration);
2859         camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
2860     }
2861     if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata)){
2862         int64_t  *sensorRollingShutterSkew =
2863             (int64_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW,
2864                 metadata);
2865         CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
2866         camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
2867                 sensorRollingShutterSkew, 1);
2868     }
2869 
2870     if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_SENSITIVITY, metadata)){
2871         int32_t sensorSensitivity =
2872             *((int32_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_SENSITIVITY, metadata));
2873         CDBG("%s: sensorSensitivity = %d", __func__, sensorSensitivity);
2874         camMetadata.update(ANDROID_SENSOR_SENSITIVITY, &sensorSensitivity, 1);
2875 
2876         //calculate the noise profile based on sensitivity
2877         double noise_profile_S = computeNoiseModelEntryS(sensorSensitivity);
2878         double noise_profile_O = computeNoiseModelEntryO(sensorSensitivity);
2879         double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
2880         for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i +=2) {
2881            noise_profile[i]   = noise_profile_S;
2882            noise_profile[i+1] = noise_profile_O;
2883         }
2884         CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
2885              noise_profile_S, noise_profile_O);
2886         camMetadata.update( ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
2887                             2 * gCamCapability[mCameraId]->num_color_channels);
2888     }
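    // Note (added for clarity): ANDROID_SENSOR_NOISE_PROFILE is a list of (S, O)
    // pairs, one pair per color channel; under the framework's sensor noise model
    // the noise variance of a pixel with normalized value x is approximately
    // S * x + O, so the same sensitivity-derived pair is replicated for every
    // channel here.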
2889 
2890 
2891     if (IS_META_AVAILABLE(CAM_INTF_META_SHADING_MODE, metadata)) {
2892         uint8_t  *shadingMode =
2893             (uint8_t *)POINTER_OF_META(CAM_INTF_META_SHADING_MODE, metadata);
2894         camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
2895     }
2896     if (IS_META_AVAILABLE(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata)) {
2897         uint8_t  *faceDetectMode =
2898             (uint8_t *)POINTER_OF_META(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
2899         uint8_t fwk_faceDetectMode = (uint8_t)lookupFwkName(FACEDETECT_MODES_MAP,
2900             sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]), *faceDetectMode);
2901         camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
2902     }
2903     if (IS_META_AVAILABLE(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata)) {
2904         uint8_t  *histogramMode =
2905             (uint8_t *)POINTER_OF_META(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
2906          camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
2907     }
2908     if (IS_META_AVAILABLE(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata)){
2909        uint8_t  *sharpnessMapMode =
2910           (uint8_t *)POINTER_OF_META(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
2911        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
2912                           sharpnessMapMode, 1);
2913     }
2914     if (IS_META_AVAILABLE(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata)){
2915        cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
2916        POINTER_OF_META(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
2917        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
2918                           (int32_t*)sharpnessMap->sharpness,
2919                           CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
2920     }
2921     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_SHADING_MAP, metadata)) {
2922        cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
2923        POINTER_OF_META(CAM_INTF_META_LENS_SHADING_MAP, metadata);
2924        int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
2925        int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
2926        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
2927                           (float*)lensShadingMap->lens_shading,
2928                           4*map_width*map_height);
2929     }
2930     if (IS_META_AVAILABLE(CAM_INTF_META_TONEMAP_MODE, metadata)) {
2931         uint8_t  *toneMapMode =
2932             (uint8_t *)POINTER_OF_META(CAM_INTF_META_TONEMAP_MODE, metadata);
2933         camMetadata.update(ANDROID_TONEMAP_MODE, toneMapMode, 1);
2934     }
2935     if (IS_META_AVAILABLE(CAM_INTF_META_TONEMAP_CURVES, metadata)){
2936         //Populate CAM_INTF_META_TONEMAP_CURVES
2937         /* ch0 = G, ch 1 = B, ch 2 = R*/
2938         cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
2939                 POINTER_OF_META(CAM_INTF_META_TONEMAP_CURVES, metadata);
2940         if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
2941             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
2942                     __func__, tonemap->tonemap_points_cnt,
2943                     CAM_MAX_TONEMAP_CURVE_SIZE);
2944             tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
2945         }
2946 
2947         camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
2948                         (float*)tonemap->curves[0].tonemap_points,
2949                         tonemap->tonemap_points_cnt * 2);
2950 
2951         camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
2952                         (float*)tonemap->curves[1].tonemap_points,
2953                         tonemap->tonemap_points_cnt * 2);
2954 
2955         camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
2956                         (float*)tonemap->curves[2].tonemap_points,
2957                         tonemap->tonemap_points_cnt * 2);
2958     }
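    // Note (added for clarity): each curve is stored as interleaved (Pin, Pout)
    // float pairs, hence the element count of tonemap_points_cnt * 2; the HAL
    // channel order is G, B, R as noted above, mapped to the per-channel
    // ANDROID_TONEMAP_CURVE_* tags.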
2959     if (IS_META_AVAILABLE(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata)){
2960         cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
2961             POINTER_OF_META(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
2962         camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
2963     }
2964     if (IS_META_AVAILABLE(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata)){
2965         cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
2966         POINTER_OF_META(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
2967         camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
2968             (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
2969     }
2970     if (IS_META_AVAILABLE(CAM_INTF_META_PROFILE_TONE_CURVE, metadata)) {
2971         cam_profile_tone_curve *toneCurve = (cam_profile_tone_curve *)
2972                 POINTER_OF_META(CAM_INTF_META_PROFILE_TONE_CURVE, metadata);
2973         if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
2974             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
2975                     __func__, toneCurve->tonemap_points_cnt,
2976                     CAM_MAX_TONEMAP_CURVE_SIZE);
2977             toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
2978         }
2979         camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
2980                 (float*)toneCurve->curve.tonemap_points,
2981                 toneCurve->tonemap_points_cnt * 2);
2982     }
2983     if (IS_META_AVAILABLE(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata)){
2984         cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
2985             POINTER_OF_META(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
2986         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
2987             predColorCorrectionGains->gains, 4);
2988     }
2989     if (IS_META_AVAILABLE(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata)){
2990         cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
2991             POINTER_OF_META(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
2992         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
2993             (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
2994     }
2995     if (IS_META_AVAILABLE(CAM_INTF_META_OTP_WB_GRGB, metadata)) {
2996         float *otpWbGrGb = (float*) POINTER_OF_META(
2997                 CAM_INTF_META_OTP_WB_GRGB, metadata);
2998         camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
2999     }
3000     if (IS_META_AVAILABLE(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata)){
3001         uint8_t *blackLevelLock = (uint8_t*)
3002             POINTER_OF_META(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
3003         camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
3004     }
3005     if (IS_META_AVAILABLE(CAM_INTF_META_SCENE_FLICKER, metadata)){
3006         uint8_t *sceneFlicker = (uint8_t*)
3007             POINTER_OF_META(CAM_INTF_META_SCENE_FLICKER, metadata);
3008         camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
3009     }
3010     if (IS_META_AVAILABLE(CAM_INTF_PARM_EFFECT, metadata)) {
3011         uint8_t *effectMode = (uint8_t*)
3012             POINTER_OF_META(CAM_INTF_PARM_EFFECT, metadata);
3013         uint8_t fwk_effectMode = (uint8_t)lookupFwkName(EFFECT_MODES_MAP,
3014                                             sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
3015                                             *effectMode);
3016         camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
3017     }
3018     if (IS_META_AVAILABLE(CAM_INTF_META_TEST_PATTERN_DATA, metadata)) {
3019         cam_test_pattern_data_t *testPatternData = (cam_test_pattern_data_t *)
3020             POINTER_OF_META(CAM_INTF_META_TEST_PATTERN_DATA, metadata);
3021         int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
3022                 sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
3023                 testPatternData->mode);
3024         camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE,
3025                 &fwk_testPatternMode, 1);
3026         int32_t fwk_testPatternData[4];
3027         fwk_testPatternData[0] = testPatternData->r;
3028         fwk_testPatternData[3] = testPatternData->b;
3029         switch (gCamCapability[mCameraId]->color_arrangement) {
3030         case CAM_FILTER_ARRANGEMENT_RGGB:
3031         case CAM_FILTER_ARRANGEMENT_GRBG:
3032             fwk_testPatternData[1] = testPatternData->gr;
3033             fwk_testPatternData[2] = testPatternData->gb;
3034             break;
3035         case CAM_FILTER_ARRANGEMENT_GBRG:
3036         case CAM_FILTER_ARRANGEMENT_BGGR:
3037             fwk_testPatternData[2] = testPatternData->gr;
3038             fwk_testPatternData[1] = testPatternData->gb;
3039             break;
3040         default:
3041             ALOGE("%s: color arrangement %d is not supported", __func__,
3042                 gCamCapability[mCameraId]->color_arrangement);
3043             break;
3044         }
3045         camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
3046     }
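    // Note (added for clarity): the switch above maps the HAL's gr/gb values into
    // positions 1 and 2 of fwk_testPatternData according to the sensor's Bayer
    // arrangement, so the four ANDROID_SENSOR_TEST_PATTERN_DATA values land in a
    // consistent channel order (nominally [R, Geven, Godd, B]) regardless of the
    // color filter layout.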
3047     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_GPS_COORDINATES, metadata)) {
3048         double *gps_coords = (double *)POINTER_OF_META(
3049             CAM_INTF_META_JPEG_GPS_COORDINATES, metadata);
3050         camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
3051     }
3052     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata)) {
3053         char *gps_methods = (char *)POINTER_OF_META(
3054             CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata);
3055         String8 str(gps_methods);
3056         camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
3057     }
3058     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata)) {
3059         int64_t *gps_timestamp = (int64_t *)POINTER_OF_META(
3060                 CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata);
3061         camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
3062     }
3063     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
3064         int32_t *jpeg_orientation = (int32_t *)POINTER_OF_META(
3065                 CAM_INTF_META_JPEG_ORIENTATION, metadata);
3066         camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
3067     }
3068     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_QUALITY, metadata)) {
3069         uint8_t *jpeg_quality = (uint8_t *)POINTER_OF_META(
3070                 CAM_INTF_META_JPEG_QUALITY, metadata);
3071         camMetadata.update(ANDROID_JPEG_QUALITY, jpeg_quality, 1);
3072     }
3073     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_THUMB_QUALITY, metadata)) {
3074         uint8_t *thumb_quality = (uint8_t *)POINTER_OF_META(
3075                 CAM_INTF_META_JPEG_THUMB_QUALITY, metadata);
3076         camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, thumb_quality, 1);
3077     }
3078     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_THUMB_SIZE, metadata)) {
3079         cam_dimension_t *thumb_size = (cam_dimension_t *)POINTER_OF_META(
3080                 CAM_INTF_META_JPEG_THUMB_SIZE, metadata);
3081     }
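    // Note (added for clarity): thumb_size above is read but not published here;
    // ANDROID_JPEG_THUMBNAIL_SIZE is already populated from the cached request
    // settings appended via jpegMetadata at the top of this function.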
3082     if (IS_META_AVAILABLE(CAM_INTF_META_PRIVATE_DATA, metadata)) {
3083         int32_t *privateData = (int32_t *)
3084                 POINTER_OF_META(CAM_INTF_META_PRIVATE_DATA, metadata);
3085         camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
3086                 privateData, MAX_METADATA_PRIVATE_PAYLOAD_SIZE);
3087     }
3088     if (metadata->is_tuning_params_valid) {
3089         uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
3090         uint8_t *data = (uint8_t*)&tuning_meta_data_blob[0];
3091         metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
3092 
3093 
3094         memcpy(data, ((uint8_t*)&metadata->tuning_params.tuning_data_version),
3095                 sizeof(uint32_t));
3096         data += sizeof(uint32_t);
3097 
3098         memcpy(data, ((uint8_t*)&metadata->tuning_params.tuning_sensor_data_size),
3099                 sizeof(uint32_t));
3100         CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
3101         data += sizeof(uint32_t);
3102 
3103         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
3104                 sizeof(uint32_t));
3105         CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
3106         data += sizeof(uint32_t);
3107 
3108         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
3109                 sizeof(uint32_t));
3110         CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
3111         data += sizeof(uint32_t);
3112 
3113         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
3114                 sizeof(uint32_t));
3115         CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
3116         data += sizeof(uint32_t);
3117 
3118         metadata->tuning_params.tuning_mod3_data_size = 0;
3119         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
3120                 sizeof(uint32_t));
3121         CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
3122         data += sizeof(uint32_t);
3123 
3124         memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
3125                 metadata->tuning_params.tuning_sensor_data_size);
3126         data += metadata->tuning_params.tuning_sensor_data_size;
3127 
3128         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
3129                 metadata->tuning_params.tuning_vfe_data_size);
3130         data += metadata->tuning_params.tuning_vfe_data_size;
3131 
3132         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
3133                 metadata->tuning_params.tuning_cpp_data_size);
3134         data += metadata->tuning_params.tuning_cpp_data_size;
3135 
3136 
3137         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
3138                 metadata->tuning_params.tuning_cac_data_size);
3139         data += metadata->tuning_params.tuning_cac_data_size;
3140 
3141         camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
3142             (int32_t*)tuning_meta_data_blob, (data-tuning_meta_data_blob)/sizeof(uint32_t));
3143     }
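    // Note (added for clarity): the QCAMERA3_TUNING_META_DATA_BLOB built above is
    // six uint32 header words (data version followed by the sensor/VFE/CPP/CAC/
    // mod3 section sizes) and then the sensor, VFE, CPP and CAC payloads copied
    // from fixed offsets inside tuning_params.data; dumpMetadataToFile() writes
    // the same layout to a file.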
3144     if (IS_META_AVAILABLE(CAM_INTF_META_NEUTRAL_COL_POINT, metadata)) {
3145         cam_neutral_col_point_t *neuColPoint = (cam_neutral_col_point_t*)
3146                 POINTER_OF_META(CAM_INTF_META_NEUTRAL_COL_POINT, metadata);
3147         camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
3148                 (camera_metadata_rational_t*)neuColPoint->neutral_col_point, 3);
3149     }
3150 
3151     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata)) {
3152          uint8_t  shadingMapMode =
3153                  *((uint32_t *)POINTER_OF_META(CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata));
3154          camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingMapMode, 1);
3155     }
3156 
3157     if (IS_META_AVAILABLE(CAM_INTF_META_AEC_ROI, metadata)) {
3158         cam_area_t  *hAeRegions =
3159                 (cam_area_t *)POINTER_OF_META(CAM_INTF_META_AEC_ROI, metadata);
3160         int32_t aeRegions[5];
3161         convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
3162         camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
3163         CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
3164                 __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
3165                 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
3166                 hAeRegions->rect.height);
3167     }
3168 
3169     if (IS_META_AVAILABLE(CAM_INTF_META_AF_ROI, metadata)) {
3170         /*af regions*/
3171         cam_area_t  *hAfRegions =
3172                 (cam_area_t *)POINTER_OF_META(CAM_INTF_META_AF_ROI, metadata);
3173         int32_t afRegions[5];
3174         convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
3175         camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
3176         CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
3177                 __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
3178                 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
3179                 hAfRegions->rect.height);
3180     }
3181 
3182     if (IS_META_AVAILABLE(CAM_INTF_PARM_ANTIBANDING, metadata)) {
3183         uint8_t hal_ab_mode =
3184                 *((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_ANTIBANDING, metadata));
3185         uint8_t fwk_ab_mode = (uint8_t)lookupFwkName(ANTIBANDING_MODES_MAP,
3186                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
3187                 hal_ab_mode);
3188         camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
3189                 &fwk_ab_mode, 1);
3190     }
3191 
3192     if (IS_META_AVAILABLE(CAM_INTF_META_MODE, metadata)) {
3193          uint8_t mode =
3194                  *((uint32_t *)POINTER_OF_META(CAM_INTF_META_MODE, metadata));
3195          camMetadata.update(ANDROID_CONTROL_MODE, &mode, 1);
3196     }
3197 
3198     /* Constant metadata values to be updated */
3199     uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
3200     camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
3201 
3202     uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
3203     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
3204 
3205     int32_t hotPixelMap[2];
3206     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
3207 
3208     uint8_t vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
3209     camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
3210 
3211     // CDS
3212     if (IS_META_AVAILABLE(CAM_INTF_PARM_CDS_MODE, metadata)) {
3213         cam_cds_mode_type_t *cds = (cam_cds_mode_type_t *)
3214                 POINTER_OF_META(CAM_INTF_PARM_CDS_MODE, metadata);
3215         int32_t mode = *cds;
3216         camMetadata.update(QCAMERA3_CDS_MODE,
3217                 &mode, 1);
3218     }
3219 
3220     // Reprocess crop data
3221     if (IS_META_AVAILABLE(CAM_INTF_META_CROP_DATA, metadata)) {
3222         cam_crop_data_t *crop_data = (cam_crop_data_t *)
3223                 POINTER_OF_PARAM(CAM_INTF_META_CROP_DATA, metadata);
3224         uint8_t cnt = crop_data->num_of_streams;
3225         if ((0 < cnt) && (cnt < MAX_NUM_STREAMS)) {
3226             int rc = NO_ERROR;
3227             int32_t *crop = new int32_t[cnt*4];
3228             if (NULL == crop) {
3229                 rc = NO_MEMORY;
3230             }
3231 
3232             int32_t *crop_stream_ids = new int32_t[cnt];
3233             if (NULL == crop_stream_ids) {
3234                 rc = NO_MEMORY;
3235             }
3236 
3237             if (NO_ERROR == rc) {
3238                 int32_t streams_found = 0;
3239                 for (size_t i = 0; i < cnt; i++) {
3240                     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3241                         it != mStreamInfo.end(); it++) {
3242                         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3243                         if (NULL != channel) {
3244                             if (crop_data->crop_info[i].stream_id ==
3245                                     channel->mStreams[0]->getMyServerID()) {
3246                                 crop[streams_found*4] = crop_data->crop_info[i].crop.left;
3247                                 crop[streams_found*4 + 1] = crop_data->crop_info[i].crop.top;
3248                                 crop[streams_found*4 + 2] = crop_data->crop_info[i].crop.width;
3249                                 crop[streams_found*4 + 3] = crop_data->crop_info[i].crop.height;
3250                                 // In a more general case we may want to generate
3251                                 // unique id depending on width, height, stream, private
3252                                 // data etc.
3253                                 crop_stream_ids[streams_found] = (int32_t)(*it)->stream;
3254                                 streams_found++;
3255                                 CDBG("%s: Adding reprocess crop data for stream %p %dx%d, %dx%d",
3256                                         __func__,
3257                                         (*it)->stream,
3258                                         crop_data->crop_info[i].crop.left,
3259                                         crop_data->crop_info[i].crop.top,
3260                                         crop_data->crop_info[i].crop.width,
3261                                         crop_data->crop_info[i].crop.height);
3262                                 break;
3263                             }
3264                         }
3265                     }
3266                 }
3267 
3268                 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
3269                         &streams_found, 1);
3270                 camMetadata.update(QCAMERA3_CROP_REPROCESS,
3271                         crop, streams_found*4);
3272                 camMetadata.update(QCAMERA3_CROP_STREAM_ID_REPROCESS,
3273                         crop_stream_ids, streams_found);
3274             }
3275 
3276             if (crop) {
3277                 delete [] crop;
3278             }
3279             if (crop_stream_ids) {
3280                 delete [] crop_stream_ids;
3281             }
3282         } else {
3283             // mm-qcamera-daemon only posts crop_data for streams
3284             // not linked to pproc, so the absence of valid crop metadata is
3285             // not necessarily an error case.
3286             CDBG("%s: No valid crop metadata entries", __func__);
3287         }
3288     }
3289 
3290     if (IS_PARAM_AVAILABLE(CAM_INTF_PARM_CAC, metadata)) {
3291         cam_aberration_mode_t  *cacMode = (cam_aberration_mode_t *)
3292                 POINTER_OF_PARAM(CAM_INTF_PARM_CAC, metadata);
3293         int32_t cac = lookupFwkName(COLOR_ABERRATION_MAP,
3294                 sizeof(COLOR_ABERRATION_MAP)/sizeof(COLOR_ABERRATION_MAP[0]),
3295                 *cacMode);
3296         if (NAME_NOT_FOUND != cac) {
3297             uint8_t val = (uint8_t) cac;
3298             camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
3299                     &val,
3300                     1);
3301         } else {
3302             ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode);
3303         }
3304     }
3305 
3306     resultMetadata = camMetadata.release();
3307     return resultMetadata;
3308 }
3309 
3310 /*===========================================================================
3311  * FUNCTION   : translateCbUrgentMetadataToResultMetadata
3312  *
3313  * DESCRIPTION: Translate urgent (3A) metadata from HAL format into framework result metadata
3314  *
3315  * PARAMETERS :
3316  *   @metadata : metadata information from callback
3317  *
3318  * RETURN     : camera_metadata_t*
3319  *              metadata in a format specified by fwk
3320  *==========================================================================*/
3321 camera_metadata_t*
3322 QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
3323                                 (metadata_buffer_t *metadata)
3324 {
3325     CameraMetadata camMetadata;
3326     camera_metadata_t* resultMetadata;
3327     uint8_t aeMode = CAM_AE_MODE_MAX;
3328     int32_t *flashMode = NULL;
3329     int32_t *redeye = NULL;
3330 
3331     if (IS_META_AVAILABLE(CAM_INTF_META_AEC_STATE, metadata)) {
3332         uint8_t *ae_state = (uint8_t *)
3333             POINTER_OF_META(CAM_INTF_META_AEC_STATE, metadata);
3334         camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
3335         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE", __func__);
3336     }
3337 
3338     if (IS_META_AVAILABLE(CAM_INTF_META_AF_STATE, metadata)) {
3339         uint8_t  *afState = (uint8_t *)
3340             POINTER_OF_META(CAM_INTF_META_AF_STATE, metadata);
3341         camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
3342         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE %d", __func__, *afState);
3343     }
3344 
3345     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata)) {
3346         float  *focusDistance =
3347             (float *)POINTER_OF_META(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
3348         camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
3349     }
3350 
3351     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_FOCUS_RANGE, metadata)) {
3352         float  *focusRange =
3353             (float *)POINTER_OF_META(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
3354         camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
3355     }
3356 
3357     if (IS_META_AVAILABLE(CAM_INTF_META_AWB_STATE, metadata)) {
3358         uint8_t  *whiteBalanceState = (uint8_t *)
3359             POINTER_OF_META(CAM_INTF_META_AWB_STATE, metadata);
3360         camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
3361         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE", __func__);
3362     }
3363 
3364     if (IS_META_AVAILABLE(CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata)) {
3365         cam_trigger_t *aecTrigger =
3366                 (cam_trigger_t *)POINTER_OF_META(CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata);
3367         camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
3368                 &aecTrigger->trigger, 1);
3369         camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
3370                 &aecTrigger->trigger_id, 1);
3371     }
3372 
3373     if (IS_META_AVAILABLE(CAM_INTF_PARM_FOCUS_MODE, metadata)) {
3374         uint8_t  *focusMode = (uint8_t *)
3375             POINTER_OF_META(CAM_INTF_PARM_FOCUS_MODE, metadata);
3376         uint8_t fwkAfMode = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
3377             sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
3378         camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
3379     }
3380 
3381     if (IS_META_AVAILABLE(CAM_INTF_META_AF_TRIGGER, metadata)) {
3382         cam_trigger_t *af_trigger =
3383                 (cam_trigger_t *)POINTER_OF_META(CAM_INTF_META_AF_TRIGGER, metadata);
3384         camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
3385                 &af_trigger->trigger, 1);
3386         camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
3387     }
3388 
3389     if (IS_META_AVAILABLE(CAM_INTF_PARM_WHITE_BALANCE, metadata)) {
3390         uint8_t  *whiteBalance = (uint8_t *)
3391             POINTER_OF_META(CAM_INTF_PARM_WHITE_BALANCE, metadata);
3392         uint8_t fwkWhiteBalanceMode =
3393             (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
3394                 sizeof(WHITE_BALANCE_MODES_MAP)/
3395                 sizeof(WHITE_BALANCE_MODES_MAP[0]), *whiteBalance);
3396         camMetadata.update(ANDROID_CONTROL_AWB_MODE,
3397             &fwkWhiteBalanceMode, 1);
3398     }
3399 
3400     if (IS_META_AVAILABLE(CAM_INTF_META_AEC_MODE, metadata)) {
3401         aeMode = *((uint32_t*) POINTER_OF_META(CAM_INTF_META_AEC_MODE, metadata));
3402     }
3403     if (IS_META_AVAILABLE(CAM_INTF_PARM_LED_MODE, metadata)) {
3404         flashMode = (int32_t*)
3405                 POINTER_OF_PARAM(CAM_INTF_PARM_LED_MODE, metadata);
3406     }
3407     if (IS_META_AVAILABLE(CAM_INTF_PARM_REDEYE_REDUCTION, metadata)) {
3408         redeye = (int32_t*)
3409                 POINTER_OF_PARAM(CAM_INTF_PARM_REDEYE_REDUCTION, metadata);
3410     }
3411 
3412     uint8_t fwk_aeMode;
3413     if (redeye != NULL && *redeye == 1) {
3414         fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
3415         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
3416     } else if (flashMode != NULL &&
3417             ((*flashMode == CAM_FLASH_MODE_AUTO)||
3418              (*flashMode == CAM_FLASH_MODE_ON))) {
3419         fwk_aeMode = (uint8_t)lookupFwkName(AE_FLASH_MODE_MAP,
3420                 sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),*flashMode);
3421         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
3422     } else if (aeMode == CAM_AE_MODE_ON) {
3423         fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
3424         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
3425     } else if (aeMode == CAM_AE_MODE_OFF) {
3426         fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
3427         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
3428     } else {
3429         ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%p, flashMode:%p, aeMode:%d!!!",__func__,
3430               redeye, flashMode, aeMode);
3431     }
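    // Note (added for clarity): the precedence used to report
    // ANDROID_CONTROL_AE_MODE is: red-eye reduction enabled ->
    // ON_AUTO_FLASH_REDEYE, else an auto/on LED flash mode -> the mapped
    // AE_FLASH_MODE_MAP value, else plain CAM_AE_MODE_ON/OFF -> ON/OFF.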
3432 
3433     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_STATE, metadata)) {
3434         uint8_t *lensState = (uint8_t *)POINTER_OF_META(CAM_INTF_META_LENS_STATE, metadata);
3435         camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
3436     }
3437 
3438     resultMetadata = camMetadata.release();
3439     return resultMetadata;
3440 }
3441 
3442 /*===========================================================================
3443  * FUNCTION   : dumpMetadataToFile
3444  *
3445  * DESCRIPTION: Dumps tuning metadata to file system
3446  *
3447  * PARAMETERS :
3448  *   @meta           : tuning metadata
3449  *   @dumpFrameCount : current dump frame count
3450  *   @enabled        : Enable mask
3451  *   @type           : dump type tag   @frameNumber : current frame number
3452  *==========================================================================*/
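/* Note (added for clarity): when enabled, the dump below produces files named
 * /data/<YYYYMMDDHHMMSS><dumpFrameCount>m_<type>_<frameNumber>.bin containing the
 * same header-plus-sections layout as the tuning metadata blob: six uint32 header
 * words (data version and five section sizes) followed by the sensor, VFE, CPP
 * and CAC data. */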
3453 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
3454                                                    uint32_t &dumpFrameCount,
3455                                                    int32_t enabled,
3456                                                    const char *type,
3457                                                    uint32_t frameNumber)
3458 {
3459     uint32_t frm_num = 0;
3460 
3461     //Some sanity checks
3462     if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
3463         ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
3464               __func__,
3465               meta.tuning_sensor_data_size,
3466               TUNING_SENSOR_DATA_MAX);
3467         return;
3468     }
3469 
3470     if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
3471         ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
3472               __func__,
3473               meta.tuning_vfe_data_size,
3474               TUNING_VFE_DATA_MAX);
3475         return;
3476     }
3477 
3478     if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
3479         ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
3480               __func__,
3481               meta.tuning_cpp_data_size,
3482               TUNING_CPP_DATA_MAX);
3483         return;
3484     }
3485 
3486     if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
3487         ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
3488               __func__,
3489               meta.tuning_cac_data_size,
3490               TUNING_CAC_DATA_MAX);
3491         return;
3492     }
3493     //
3494 
3495     if(enabled){
3496         char timeBuf[FILENAME_MAX];
3497         char buf[FILENAME_MAX];
3498         memset(buf, 0, sizeof(buf));
3499         memset(timeBuf, 0, sizeof(timeBuf));
3500         time_t current_time;
3501         struct tm * timeinfo;
3502         time (&current_time);
3503         timeinfo = localtime (&current_time);
3504         strftime (timeBuf, sizeof(timeBuf),"/data/%Y%m%d%H%M%S", timeinfo);
3505         String8 filePath(timeBuf);
3506         snprintf(buf,
3507                 sizeof(buf),
3508                 "%dm_%s_%d.bin",
3509                 dumpFrameCount,
3510                 type,
3511                 frameNumber);
3512         filePath.append(buf);
3513         int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
3514         if (file_fd >= 0) {
3515             int written_len = 0;
3516             meta.tuning_data_version = TUNING_DATA_VERSION;
3517             void *data = (void *)((uint8_t *)&meta.tuning_data_version);
3518             written_len += write(file_fd, data, sizeof(uint32_t));
3519             data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
3520             CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
3521             written_len += write(file_fd, data, sizeof(uint32_t));
3522             data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
3523             CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
3524             written_len += write(file_fd, data, sizeof(uint32_t));
3525             data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
3526             CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
3527             written_len += write(file_fd, data, sizeof(uint32_t));
3528             data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
3529             CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
3530             written_len += write(file_fd, data, sizeof(uint32_t));
3531             meta.tuning_mod3_data_size = 0;
3532             data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
3533             CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
3534             written_len += write(file_fd, data, sizeof(uint32_t));
3535             int total_size = meta.tuning_sensor_data_size;
3536             data = (void *)((uint8_t *)&meta.data);
3537             written_len += write(file_fd, data, total_size);
3538             total_size = meta.tuning_vfe_data_size;
3539             data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
3540             written_len += write(file_fd, data, total_size);
3541             total_size = meta.tuning_cpp_data_size;
3542             data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
3543             written_len += write(file_fd, data, total_size);
3544             total_size = meta.tuning_cac_data_size;
3545             data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
3546             written_len += write(file_fd, data, total_size);
3547             close(file_fd);
3548         } else {
3549             ALOGE("%s: fail to open file for metadata dumping", __func__);
3550         }
3551     }
3552 }
3553 
3554 /*===========================================================================
3555  * FUNCTION   : cleanAndSortStreamInfo
3556  *
3557  * DESCRIPTION: helper method to clean up invalid streams in stream_info,
3558  *              and sort them such that the raw stream is at the end of the list.
3559  *              This is a workaround for a camera daemon constraint.
3560  *
3561  * PARAMETERS : None
3562  *
3563  *==========================================================================*/
3564 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
3565 {
3566     List<stream_info_t *> newStreamInfo;
3567 
3568     /*clean up invalid streams*/
3569     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
3570             it != mStreamInfo.end();) {
3571         if(((*it)->status) == INVALID){
3572             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
3573             delete channel;
3574             free(*it);
3575             it = mStreamInfo.erase(it);
3576         } else {
3577             it++;
3578         }
3579     }
3580 
3581     // Move preview/video/callback/snapshot streams into newList
3582     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3583             it != mStreamInfo.end();) {
3584         if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
3585                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
3586                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
3587             newStreamInfo.push_back(*it);
3588             it = mStreamInfo.erase(it);
3589         } else
3590             it++;
3591     }
3592     // Move raw streams into newList
3593     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3594             it != mStreamInfo.end();) {
3595         newStreamInfo.push_back(*it);
3596         it = mStreamInfo.erase(it);
3597     }
3598 
3599     mStreamInfo = newStreamInfo;
3600 }
3601 
3602 /*===========================================================================
3603  * FUNCTION   : extractJpegMetadata
3604  *
3605  * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
3606  *              JPEG metadata is cached in the HAL and returned as part of the
3607  *              capture result when metadata is returned from the camera daemon.
3608  *
3609  * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
3610  *              @request:      capture request
3611  *
3612  *==========================================================================*/
3613 void QCamera3HardwareInterface::extractJpegMetadata(
3614         CameraMetadata& jpegMetadata,
3615         const camera3_capture_request_t *request)
3616 {
3617     CameraMetadata frame_settings;
3618     frame_settings = request->settings;
3619 
3620     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
3621         jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
3622                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
3623                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
3624 
3625     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
3626         jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
3627                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
3628                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
3629 
3630     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
3631         jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
3632                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
3633                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
3634 
3635     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
3636         jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
3637                 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
3638                 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
3639 
3640     if (frame_settings.exists(ANDROID_JPEG_QUALITY))
3641         jpegMetadata.update(ANDROID_JPEG_QUALITY,
3642                 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
3643                 frame_settings.find(ANDROID_JPEG_QUALITY).count);
3644 
3645     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
3646         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
3647                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
3648                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
3649 
3650     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE))
3651         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
3652                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32,
3653                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
3654 }
3655 
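/* Illustrative sketch (not part of the original source): how the cached JPEG
 * metadata produced by extractJpegMetadata() might later be folded back into
 * the capture result, assuming CameraMetadata::append() behaves as in AOSP.
 * The resultMetadata variable and the call producing it are hypothetical.
 *
 *     CameraMetadata jpegMetadata;
 *     extractJpegMetadata(jpegMetadata, request);   // cache at request time
 *     ...
 *     CameraMetadata resultMetadata = ...;          // built from daemon metadata
 *     resultMetadata.append(jpegMetadata);          // merge cached JPEG tags back in
 */
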
3656 /*===========================================================================
3657  * FUNCTION   : convertToRegions
3658  *
3659  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
3660  *
3661  * PARAMETERS :
3662  *   @rect   : cam_rect_t struct to convert
3663  *   @region : int32_t destination array
3664  *   @weight : if we are converting from cam_area_t, weight is valid
3665  *             else weight = -1
3666  *
3667  *==========================================================================*/
3668 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
3669     region[0] = rect.left;
3670     region[1] = rect.top;
3671     region[2] = rect.left + rect.width;
3672     region[3] = rect.top + rect.height;
3673     if (weight > -1) {
3674         region[4] = weight;
3675     }
3676 }
3677 
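/* Illustrative sketch (not part of the original source): a worked example of
 * the conversion above. The values are hypothetical.
 *
 *     cam_rect_t rect = {100, 200, 300, 400};   // left, top, width, height
 *     int32_t region[5];
 *     convertToRegions(rect, region, 1);
 *     // region == {100, 200, 400, 600, 1}
 *     //   x_max = left + width  = 100 + 300
 *     //   y_max = top  + height = 200 + 400
 */
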
3678 /*===========================================================================
3679  * FUNCTION   : convertFromRegions
3680  *
3681  * DESCRIPTION: helper method to convert a metadata region array into cam_area_t
3682  *
3683  * PARAMETERS :
3684  *   @roi      : cam_area_t destination struct
3685  *   @settings : capture request settings metadata
3686  *   @tag      : metadata tag whose entry holds the region as
3687  *               [x_min, y_min, x_max, y_max, weight]
3688  *
3689  *==========================================================================*/
3690 void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
3691                                                    const camera_metadata_t *settings,
3692                                                    uint32_t tag){
3693     CameraMetadata frame_settings;
3694     frame_settings = settings;
3695     int32_t x_min = frame_settings.find(tag).data.i32[0];
3696     int32_t y_min = frame_settings.find(tag).data.i32[1];
3697     int32_t x_max = frame_settings.find(tag).data.i32[2];
3698     int32_t y_max = frame_settings.find(tag).data.i32[3];
3699     roi->weight = frame_settings.find(tag).data.i32[4];
3700     roi->rect.left = x_min;
3701     roi->rect.top = y_min;
3702     roi->rect.width = x_max - x_min;
3703     roi->rect.height = y_max - y_min;
3704 }
3705 
3706 /*===========================================================================
3707  * FUNCTION   : resetIfNeededROI
3708  *
3709  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
3710  *              crop region
3711  *
3712  * PARAMETERS :
3713  *   @roi       : cam_area_t struct to resize
3714  *   @scalerCropRegion : cam_crop_region_t region to compare against
3715  *
3716  *
3717  *==========================================================================*/
3718 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
3719                                                  const cam_crop_region_t* scalerCropRegion)
3720 {
3721     int32_t roi_x_max = roi->rect.width + roi->rect.left;
3722     int32_t roi_y_max = roi->rect.height + roi->rect.top;
3723     int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
3724     int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
3725 
3726     /* According to the spec, weight = 0 indicates that the roi should be
3727      * disabled. Without this check, the validation below that tests whether
3728      * the roi lies inside the scaler crop region would fail, so the roi would
3729      * not be reset and the algorithm would keep using a stale roi window.
3730      */
3731     if (roi->weight == 0) {
3732         return true;
3733     }
3734 
3735     if ((roi_x_max < scalerCropRegion->left) ||
3736         // right edge of roi window is left of scaler crop's left edge
3737         (roi_y_max < scalerCropRegion->top)  ||
3738         // bottom edge of roi window is above scaler crop's top edge
3739         (roi->rect.left > crop_x_max) ||
3740         // left edge of roi window is beyond (right of) scaler crop's right edge
3741         (roi->rect.top > crop_y_max)){
3742         // top edge of roi window is below scaler crop's bottom edge
3743         return false;
3744     }
3745     if (roi->rect.left < scalerCropRegion->left) {
3746         roi->rect.left = scalerCropRegion->left;
3747     }
3748     if (roi->rect.top < scalerCropRegion->top) {
3749         roi->rect.top = scalerCropRegion->top;
3750     }
3751     if (roi_x_max > crop_x_max) {
3752         roi_x_max = crop_x_max;
3753     }
3754     if (roi_y_max > crop_y_max) {
3755         roi_y_max = crop_y_max;
3756     }
3757     roi->rect.width = roi_x_max - roi->rect.left;
3758     roi->rect.height = roi_y_max - roi->rect.top;
3759     return true;
3760 }
3761 
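/* Illustrative sketch (not part of the original source): a worked example of
 * the clamping above, with hypothetical values.
 *
 *     roi:              left=0,   top=0,   width=2000, height=1500, weight=1
 *     scalerCropRegion: left=400, top=300, width=1200, height=900
 *
 *     crop_x_max = 400 + 1200 = 1600, crop_y_max = 300 + 900 = 1200
 *     The roi overlaps the crop region, so it is clamped rather than rejected:
 *     left -> 400, top -> 300, width -> 1600 - 400 = 1200, height -> 1200 - 300 = 900
 *     resetIfNeededROI() returns true with roi->rect == {400, 300, 1200, 900}.
 */
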
3762 /*===========================================================================
3763  * FUNCTION   : convertLandmarks
3764  *
3765  * DESCRIPTION: helper method to extract the landmarks from face detection info
3766  *
3767  * PARAMETERS :
3768  *   @face   : cam_face_detection_info_t struct to convert
3769  *   @landmarks : int32_t destination array
3770  *
3771  *
3772  *==========================================================================*/
3773 void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
3774 {
3775     landmarks[0] = face.left_eye_center.x;
3776     landmarks[1] = face.left_eye_center.y;
3777     landmarks[2] = face.right_eye_center.x;
3778     landmarks[3] = face.right_eye_center.y;
3779     landmarks[4] = face.mouth_center.x;
3780     landmarks[5] = face.mouth_center.y;
3781 }
3782 
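/* Note (not part of the original source): the six-element layout written above
 * is assumed to match the per-face ordering expected for
 * ANDROID_STATISTICS_FACE_LANDMARKS, i.e. leftEye(x,y), rightEye(x,y),
 * mouth(x,y). A caller filling results for several faces would typically
 * offset into one flat array, e.g. (names here are hypothetical):
 *
 *     int32_t landmarks[MAX_ROI * 6];
 *     for (int i = 0; i < numFaces; i++)
 *         convertLandmarks(faces[i], landmarks + i * 6);
 */
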
3783 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
3784 /*===========================================================================
3785  * FUNCTION   : initCapabilities
3786  *
3787  * DESCRIPTION: initialize camera capabilities in static data struct
3788  *
3789  * PARAMETERS :
3790  *   @cameraId  : camera Id
3791  *
3792  * RETURN     : int32_t type of status
3793  *              NO_ERROR  -- success
3794  *              non-zero failure code
3795  *==========================================================================*/
3796 int QCamera3HardwareInterface::initCapabilities(int cameraId)
3797 {
3798     int rc = 0;
3799     mm_camera_vtbl_t *cameraHandle = NULL;
3800     QCamera3HeapMemory *capabilityHeap = NULL;
3801 
3802     cameraHandle = camera_open(cameraId);
3803     if (!cameraHandle) {
3804         ALOGE("%s: camera_open failed", __func__);
3805         rc = -1;
3806         goto open_failed;
3807     }
3808 
3809     capabilityHeap = new QCamera3HeapMemory();
3810     if (capabilityHeap == NULL) {
3811         ALOGE("%s: creation of capabilityHeap failed", __func__);
3812         goto heap_creation_failed;
3813     }
3814     /* Allocate memory for capability buffer */
3815     rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
3816     if(rc != OK) {
3817         ALOGE("%s: No memory for cappability", __func__);
3818         goto allocate_failed;
3819     }
3820 
3821     /* Map memory for capability buffer */
3822     memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
3823     rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
3824                                 CAM_MAPPING_BUF_TYPE_CAPABILITY,
3825                                 capabilityHeap->getFd(0),
3826                                 sizeof(cam_capability_t));
3827     if(rc < 0) {
3828         ALOGE("%s: failed to map capability buffer", __func__);
3829         goto map_failed;
3830     }
3831 
3832     /* Query Capability */
3833     rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
3834     if(rc < 0) {
3835         ALOGE("%s: failed to query capability",__func__);
3836         goto query_failed;
3837     }
3838     gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
3839     if (!gCamCapability[cameraId]) {
3840         ALOGE("%s: out of memory", __func__);
3841         goto query_failed;
3842     }
3843     memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
3844                                         sizeof(cam_capability_t));
3845     rc = 0;
3846 
3847 query_failed:
3848     cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
3849                             CAM_MAPPING_BUF_TYPE_CAPABILITY);
3850 map_failed:
3851     capabilityHeap->deallocate();
3852 allocate_failed:
3853     delete capabilityHeap;
3854 heap_creation_failed:
3855     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
3856     cameraHandle = NULL;
3857 open_failed:
3858     return rc;
3859 }
3860 
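/* Note (not part of the original source): initCapabilities() uses a classic
 * goto-unwind ladder. A failure at step N jumps to a label that releases only
 * the resources acquired before step N, and on success the code falls through
 * the same labels, so the temporary capability mapping, the heap buffer and
 * the camera handle are always released; only the malloc'ed copy stored in
 * gCamCapability[cameraId] survives. A minimal sketch of the same pattern,
 * with hypothetical names:
 *
 *     rc = doA();   if (rc < 0) goto a_failed;
 *     rc = doB();   if (rc < 0) goto b_failed;
 *     rc = doC();   if (rc < 0) goto c_failed;
 *     rc = 0;                           // success: still fall through cleanup
 * c_failed:
 *     undoB();
 * b_failed:
 *     undoA();
 * a_failed:
 *     return rc;
 */
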
3861 /*===========================================================================
3862  * FUNCTION   : initParameters
3863  *
3864  * DESCRIPTION: initialize camera parameters
3865  *
3866  * PARAMETERS :
3867  *
3868  * RETURN     : int32_t type of status
3869  *              NO_ERROR  -- success
3870  *              non-zero failure code
3871  *==========================================================================*/
3872 int QCamera3HardwareInterface::initParameters()
3873 {
3874     int rc = 0;
3875 
3876     //Allocate Set Param Buffer
3877     mParamHeap = new QCamera3HeapMemory();
3878     rc = mParamHeap->allocate(1, sizeof(metadata_buffer_t), false);
3879     if(rc != OK) {
3880         rc = NO_MEMORY;
3881         ALOGE("Failed to allocate SETPARM Heap memory");
3882         delete mParamHeap;
3883         mParamHeap = NULL;
3884         return rc;
3885     }
3886 
3887     //Map memory for parameters buffer
3888     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
3889             CAM_MAPPING_BUF_TYPE_PARM_BUF,
3890             mParamHeap->getFd(0),
3891             sizeof(metadata_buffer_t));
3892     if(rc < 0) {
3893         ALOGE("%s:failed to map SETPARM buffer",__func__);
3894         rc = FAILED_TRANSACTION;
3895         mParamHeap->deallocate();
3896         delete mParamHeap;
3897         mParamHeap = NULL;
3898         return rc;
3899     }
3900 
3901     mParameters = (metadata_buffer_t*) DATA_PTR(mParamHeap,0);
3902     return rc;
3903 }
3904 
3905 /*===========================================================================
3906  * FUNCTION   : deinitParameters
3907  *
3908  * DESCRIPTION: de-initialize camera parameters
3909  *
3910  * PARAMETERS :
3911  *
3912  * RETURN     : NONE
3913  *==========================================================================*/
3914 void QCamera3HardwareInterface::deinitParameters()
3915 {
3916     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
3917             CAM_MAPPING_BUF_TYPE_PARM_BUF);
3918 
3919     mParamHeap->deallocate();
3920     delete mParamHeap;
3921     mParamHeap = NULL;
3922 
3923     mParameters = NULL;
3924 }
3925 
3926 /*===========================================================================
3927  * FUNCTION   : calcMaxJpegSize
3928  *
3929  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
3930  *
3931  * PARAMETERS :
3932  *   @camera_id : camera Id
3933  * RETURN     : max_jpeg_size
3934  *==========================================================================*/
3935 int QCamera3HardwareInterface::calcMaxJpegSize(uint8_t camera_id)
3936 {
3937     int32_t max_jpeg_size = 0;
3938     int temp_width, temp_height;
3939     for (int i = 0; i < gCamCapability[camera_id]->picture_sizes_tbl_cnt; i++) {
3940         temp_width = gCamCapability[camera_id]->picture_sizes_tbl[i].width;
3941         temp_height = gCamCapability[camera_id]->picture_sizes_tbl[i].height;
3942         if (temp_width * temp_height > max_jpeg_size ) {
3943             max_jpeg_size = temp_width * temp_height;
3944         }
3945     }
3946     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
3947     return max_jpeg_size;
3948 }
3949 
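/* Illustrative arithmetic (not part of the original source), assuming a
 * hypothetical 4160x3120 maximum picture size:
 *
 *     4160 * 3120                    = 12,979,200 pixels
 *     * 3 / 2                        = 19,468,800 bytes (worst-case JPEG budget)
 *     + sizeof(camera3_jpeg_blob_t)    (trailer appended to each JPEG buffer)
 *     ~= 19.5 MB reported via ANDROID_JPEG_MAX_SIZE
 */
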
3950 /*===========================================================================
3951  * FUNCTION   : getMaxRawSize
3952  *
3953  * DESCRIPTION: Fetches maximum raw size supported by the cameraId
3954  *
3955  * PARAMETERS :
3956  *   @camera_id : camera Id
3957  * RETURN     : Largest supported Raw Dimension
3958  *==========================================================================*/
3959 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint8_t camera_id)
3960 {
3961     int max_width = 0;
3962     cam_dimension_t maxRawSize;
3963 
3964     memset(&maxRawSize, 0, sizeof(cam_dimension_t));
3965     for (int i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
3966         if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
3967             max_width = gCamCapability[camera_id]->raw_dim[i].width;
3968             maxRawSize = gCamCapability[camera_id]->raw_dim[i];
3969         }
3970     }
3971     return maxRawSize;
3972 }
3973 
3974 
3975 /*===========================================================================
3976  * FUNCTION   : calcMaxJpegDim
3977  *
3978  * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
3979  *
3980  * PARAMETERS :
3981  *
3982  * RETURN     : max_jpeg_dim
3983  *==========================================================================*/
3984 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
3985 {
3986     cam_dimension_t max_jpeg_dim;
3987     cam_dimension_t curr_jpeg_dim;
3988     max_jpeg_dim.width = 0;
3989     max_jpeg_dim.height = 0;
3990     curr_jpeg_dim.width = 0;
3991     curr_jpeg_dim.height = 0;
3992     for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
3993         curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
3994         curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
3995         if (curr_jpeg_dim.width * curr_jpeg_dim.height >
3996             max_jpeg_dim.width * max_jpeg_dim.height ) {
3997             max_jpeg_dim.width = curr_jpeg_dim.width;
3998             max_jpeg_dim.height = curr_jpeg_dim.height;
3999         }
4000     }
4001     return max_jpeg_dim;
4002 }
4003 
4004 
4005 /*===========================================================================
4006  * FUNCTION   : initStaticMetadata
4007  *
4008  * DESCRIPTION: initialize the static metadata
4009  *
4010  * PARAMETERS :
4011  *   @cameraId  : camera Id
4012  *
4013  * RETURN     : int32_t type of status
4014  *              0  -- success
4015  *              non-zero failure code
4016  *==========================================================================*/
4017 int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
4018 {
4019     int rc = 0;
4020     CameraMetadata staticInfo;
4021 
4022     bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
4023     if (!facingBack)
4024         gCamCapability[cameraId]->supported_raw_dim_cnt = 0;
4025 
4026      /* android.info: hardware level */
4027     uint8_t supportedHardwareLevel = (facingBack)? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL:
4028       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
4029     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
4030         &supportedHardwareLevel, 1);
4031     /*HAL 3 only*/
4032     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
4033                     &gCamCapability[cameraId]->min_focus_distance, 1);
4034 
4035     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
4036                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
4037 
4038     /*should be using focal lengths but sensor doesn't provide that info now*/
4039     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
4040                       &gCamCapability[cameraId]->focal_length,
4041                       1);
4042 
4043     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
4044                       gCamCapability[cameraId]->apertures,
4045                       gCamCapability[cameraId]->apertures_count);
4046 
4047     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
4048                 gCamCapability[cameraId]->filter_densities,
4049                 gCamCapability[cameraId]->filter_densities_count);
4050 
4051 
4052     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
4053                       (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
4054                       gCamCapability[cameraId]->optical_stab_modes_count);
4055 
4056     staticInfo.update(ANDROID_LENS_POSITION,
4057                       gCamCapability[cameraId]->lens_position,
4058                       sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
4059 
4060     int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
4061                                        gCamCapability[cameraId]->lens_shading_map_size.height};
4062     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
4063                       lens_shading_map_size,
4064                       sizeof(lens_shading_map_size)/sizeof(int32_t));
4065 
4066     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
4067             gCamCapability[cameraId]->sensor_physical_size, 2);
4068 
4069     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
4070             gCamCapability[cameraId]->exposure_time_range, 2);
4071 
4072     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
4073             &gCamCapability[cameraId]->max_frame_duration, 1);
4074 
4075     camera_metadata_rational baseGainFactor = {
4076             gCamCapability[cameraId]->base_gain_factor.numerator,
4077             gCamCapability[cameraId]->base_gain_factor.denominator};
4078     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
4079                       &baseGainFactor, 1);
4080 
4081     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
4082                      (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
4083 
4084     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
4085                                   gCamCapability[cameraId]->pixel_array_size.height};
4086     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
4087                       pixel_array_size, 2);
4088 
4089     int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
4090                                                 gCamCapability[cameraId]->active_array_size.top,
4091                                                 gCamCapability[cameraId]->active_array_size.width,
4092                                                 gCamCapability[cameraId]->active_array_size.height};
4093     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
4094                       active_array_size, 4);
4095 
4096     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
4097             &gCamCapability[cameraId]->white_level, 1);
4098 
4099     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
4100             gCamCapability[cameraId]->black_level_pattern, 4);
4101 
4102     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
4103                       &gCamCapability[cameraId]->flash_charge_duration, 1);
4104 
4105     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
4106                       &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
4107 
4108     int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
4109     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
4110                       (int32_t*)&maxFaces, 1);
4111 
4112     uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
4113     if (0 && gCamCapability[cameraId]->isTimestampCalibrated) {
4114         timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
4115     }
4116     staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
4117             &timestampSource, 1);
4118 
4119     staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
4120                       &gCamCapability[cameraId]->histogram_size, 1);
4121 
4122     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
4123             &gCamCapability[cameraId]->max_histogram_count, 1);
4124 
4125     int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
4126                                     gCamCapability[cameraId]->sharpness_map_size.height};
4127 
4128     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
4129             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
4130 
4131     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
4132             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
4133 
4134     int32_t scalar_formats[] = {
4135             ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
4136             ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
4137             ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
4138             ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
4139             HAL_PIXEL_FORMAT_RAW10,
4140             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
4141     int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
4142     staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
4143                       scalar_formats,
4144                       scalar_formats_count);
4145 
4146     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
4147     makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
4148               gCamCapability[cameraId]->picture_sizes_tbl_cnt,
4149               available_processed_sizes);
4150     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
4151                 available_processed_sizes,
4152                 (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
4153 
4154     int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
4155     makeTable(gCamCapability[cameraId]->raw_dim,
4156               gCamCapability[cameraId]->supported_raw_dim_cnt,
4157               available_raw_sizes);
4158     staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
4159                 available_raw_sizes,
4160                 gCamCapability[cameraId]->supported_raw_dim_cnt * 2);
4161 
4162     int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
4163     makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
4164                  gCamCapability[cameraId]->fps_ranges_tbl_cnt,
4165                  available_fps_ranges);
4166     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
4167             available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
4168 
4169     camera_metadata_rational exposureCompensationStep = {
4170             gCamCapability[cameraId]->exp_compensation_step.numerator,
4171             gCamCapability[cameraId]->exp_compensation_step.denominator};
4172     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
4173                       &exposureCompensationStep, 1);
4174 
4175     uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
4176     staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
4177                       availableVstabModes, sizeof(availableVstabModes));
4178 
4179     /*HAL 1 and HAL 3 common*/
4180     float maxZoom = 4;
4181     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
4182             &maxZoom, 1);
4183 
4184     uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
4185     staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
4186 
4187     int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
4188     if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
4189         max3aRegions[2] = 0; /* AF not supported */
4190     staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
4191             max3aRegions, 3);
4192 
4193     uint8_t availableFaceDetectModes[] = {
4194             ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
4195             ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
4196     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
4197                       availableFaceDetectModes,
4198                       sizeof(availableFaceDetectModes));
4199 
4200     int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
4201                                            gCamCapability[cameraId]->exposure_compensation_max};
4202     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
4203             exposureCompensationRange,
4204             sizeof(exposureCompensationRange)/sizeof(int32_t));
4205 
4206     uint8_t lensFacing = (facingBack) ?
4207             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
4208     staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
4209 
4210     staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
4211                       available_thumbnail_sizes,
4212                       sizeof(available_thumbnail_sizes)/sizeof(int32_t));
4213 
4214     /*all sizes will be clubbed into this tag*/
4215     int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
4216     uint8_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes,
4217             (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2,
4218              MAX_SIZES_CNT * 2,
4219              gCamCapability[cameraId]->active_array_size,
4220              gCamCapability[cameraId]->max_downscale_factor);
4221     /*android.scaler.availableStreamConfigurations*/
4222     int32_t max_stream_configs_size =
4223             gCamCapability[cameraId]->picture_sizes_tbl_cnt *
4224             sizeof(scalar_formats)/sizeof(int32_t) * 4;
4225     int32_t available_stream_configs[max_stream_configs_size];
4226     int idx = 0;
4227     for (int j = 0; j < scalar_formats_count; j++) {
4228         switch (scalar_formats[j]) {
4229         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
4230         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
4231         case HAL_PIXEL_FORMAT_RAW10:
4232             for (int i = 0;
4233                 i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
4234                 available_stream_configs[idx] = scalar_formats[j];
4235                 available_stream_configs[idx+1] =
4236                     gCamCapability[cameraId]->raw_dim[i].width;
4237                 available_stream_configs[idx+2] =
4238                     gCamCapability[cameraId]->raw_dim[i].height;
4239                 available_stream_configs[idx+3] =
4240                     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
4241                 idx+=4;
4242             }
4243             break;
4244         case HAL_PIXEL_FORMAT_BLOB:
4245             for (int i = 0; i < jpeg_sizes_cnt/2; i++) {
4246                 available_stream_configs[idx] = scalar_formats[j];
4247                 available_stream_configs[idx+1] = available_jpeg_sizes[i*2];
4248                 available_stream_configs[idx+2] = available_jpeg_sizes[i*2+1];
4249                 available_stream_configs[idx+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
4250                 idx+=4;
4251             }
4252             break;
4253         default:
4254             for (int i = 0;
4255                 i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
4256                 available_stream_configs[idx] = scalar_formats[j];
4257                 available_stream_configs[idx+1] =
4258                     gCamCapability[cameraId]->picture_sizes_tbl[i].width;
4259                 available_stream_configs[idx+2] =
4260                     gCamCapability[cameraId]->picture_sizes_tbl[i].height;
4261                 available_stream_configs[idx+3] =
4262                     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
4263                 idx+=4;
4264             }
4265 
4266 
4267             break;
4268         }
4269     }
4270     staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
4271                       available_stream_configs, idx);
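    /* Note (not part of the original source): each entry in
     * available_stream_configs is a (format, width, height, direction)
     * quadruple. For example, a hypothetical entry
     *     { HAL_PIXEL_FORMAT_BLOB, 4160, 3120,
     *       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT }
     * advertises a 4160x3120 JPEG output stream to the framework.
     */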
4272     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
4273     staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
4274 
4275     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4276     staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4277 
4278     /* android.scaler.availableMinFrameDurations */
4279     int64_t available_min_durations[max_stream_configs_size];
4280     idx = 0;
4281     for (int j = 0; j < scalar_formats_count; j++) {
4282         switch (scalar_formats[j]) {
4283         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
4284         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
4285             for (int i = 0;
4286                 i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
4287                 available_min_durations[idx] = scalar_formats[j];
4288                 available_min_durations[idx+1] =
4289                     gCamCapability[cameraId]->raw_dim[i].width;
4290                 available_min_durations[idx+2] =
4291                     gCamCapability[cameraId]->raw_dim[i].height;
4292                 available_min_durations[idx+3] =
4293                     gCamCapability[cameraId]->raw_min_duration[i];
4294                 idx+=4;
4295             }
4296             break;
4297         default:
4298             for (int i = 0;
4299                 i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
4300                 available_min_durations[idx] = scalar_formats[j];
4301                 available_min_durations[idx+1] =
4302                     gCamCapability[cameraId]->picture_sizes_tbl[i].width;
4303                 available_min_durations[idx+2] =
4304                     gCamCapability[cameraId]->picture_sizes_tbl[i].height;
4305                 available_min_durations[idx+3] =
4306                     gCamCapability[cameraId]->picture_min_duration[i];
4307                 idx+=4;
4308             }
4309             break;
4310         }
4311     }
4312     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
4313                       &available_min_durations[0], idx);
4314 
4315     int32_t max_jpeg_size = calcMaxJpegSize(cameraId);
4316     staticInfo.update(ANDROID_JPEG_MAX_SIZE,
4317                       &max_jpeg_size, 1);
4318 
4319     uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
4320     size_t size = 0;
4321     for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
4322         int32_t val = lookupFwkName(EFFECT_MODES_MAP,
4323                                    sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
4324                                    gCamCapability[cameraId]->supported_effects[i]);
4325         if (val != NAME_NOT_FOUND) {
4326             avail_effects[size] = (uint8_t)val;
4327             size++;
4328         }
4329     }
4330     staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
4331                       avail_effects,
4332                       size);
4333 
4334     uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
4335     uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
4336     int32_t supported_scene_modes_cnt = 0;
4337     for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
4338         int32_t val = lookupFwkName(SCENE_MODES_MAP,
4339                                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
4340                                 gCamCapability[cameraId]->supported_scene_modes[i]);
4341         if (val != NAME_NOT_FOUND) {
4342             avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
4343             supported_indexes[supported_scene_modes_cnt] = i;
4344             supported_scene_modes_cnt++;
4345         }
4346     }
4347 
4348     staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
4349                       avail_scene_modes,
4350                       supported_scene_modes_cnt);
4351 
4352     uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
4353     makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
4354                       supported_scene_modes_cnt,
4355                       scene_mode_overrides,
4356                       supported_indexes,
4357                       cameraId);
4358     staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
4359                       scene_mode_overrides,
4360                       supported_scene_modes_cnt*3);
4361 
4362     uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
4363     size = 0;
4364     for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
4365         int32_t val = lookupFwkName(ANTIBANDING_MODES_MAP,
4366                                  sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
4367                                  gCamCapability[cameraId]->supported_antibandings[i]);
4368         if (val != NAME_NOT_FOUND) {
4369             avail_antibanding_modes[size] = (uint8_t)val;
4370             size++;
4371         }
4372 
4373     }
4374     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
4375                       avail_antibanding_modes,
4376                       size);
4377 
4378     uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
4379     size = 0;
4380     if (0 == gCamCapability[cameraId]->aberration_modes_count) {
4381         avail_abberation_modes[0] =
4382                 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
4383         size++;
4384     } else {
4385         for (size_t i = 0; i < gCamCapability[cameraId]->aberration_modes_count; i++) {
4386             int32_t val = lookupFwkName(COLOR_ABERRATION_MAP,
4387                     sizeof(COLOR_ABERRATION_MAP)/sizeof(COLOR_ABERRATION_MAP[0]),
4388                     gCamCapability[cameraId]->aberration_modes[i]);
4389             if (val != NAME_NOT_FOUND) {
4390                 avail_abberation_modes[size] = (uint8_t)val;
4391                 size++;
4392             } else {
4393                 ALOGE("%s: Invalid CAC mode %d", __func__,
4394                         gCamCapability[cameraId]->aberration_modes[i]);
4395                 break;
4396             }
4397         }
4398 
4399     }
4400     staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
4401             avail_abberation_modes,
4402             size);
4403 
4404     char cafProp[PROPERTY_VALUE_MAX];
4405     memset(cafProp, 0, sizeof(cafProp));
4406     property_get("persist.camera.caf.disable", cafProp, "0");
4407     uint8_t cafDisabled = atoi(cafProp);
4408 
4409     uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
4410     size = 0;
4411     for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
4412         if (cafDisabled &&
4413             ((gCamCapability[cameraId]->supported_focus_modes[i]
4414               == CAM_FOCUS_MODE_CONTINOUS_PICTURE) ||
4415              (gCamCapability[cameraId]->supported_focus_modes[i]
4416               == CAM_FOCUS_MODE_CONTINOUS_VIDEO)))
4417             continue;
4418 
4419         int32_t val = lookupFwkName(FOCUS_MODES_MAP,
4420                                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
4421                                 gCamCapability[cameraId]->supported_focus_modes[i]);
4422         if (val != NAME_NOT_FOUND) {
4423             avail_af_modes[size] = (uint8_t)val;
4424             size++;
4425         }
4426     }
4427     staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
4428                       avail_af_modes,
4429                       size);
4430 
4431     uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
4432     size = 0;
4433     for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
4434         int32_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
4435                                     sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
4436                                     gCamCapability[cameraId]->supported_white_balances[i]);
4437         if (val != NAME_NOT_FOUND) {
4438             avail_awb_modes[size] = (uint8_t)val;
4439             size++;
4440         }
4441     }
4442     staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
4443                       avail_awb_modes,
4444                       size);
4445 
4446     uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
4447     for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
4448       available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
4449 
4450     staticInfo.update(ANDROID_FLASH_FIRING_POWER,
4451             available_flash_levels,
4452             gCamCapability[cameraId]->supported_flash_firing_level_cnt);
4453 
4454     uint8_t flashAvailable;
4455     if (gCamCapability[cameraId]->flash_available)
4456         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
4457     else
4458         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
4459     staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
4460             &flashAvailable, 1);
4461 
4462     uint8_t avail_ae_modes[5];
4463     size = 0;
4464     for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
4465         avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
4466         size++;
4467     }
4468     if (flashAvailable) {
4469         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
4470         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
4471         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
4472     }
4473     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
4474                       avail_ae_modes,
4475                       size);
4476 
4477     int32_t sensitivity_range[2];
4478     sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
4479     sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
4480     staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
4481                       sensitivity_range,
4482                       sizeof(sensitivity_range) / sizeof(int32_t));
4483 
4484     staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
4485                       &gCamCapability[cameraId]->max_analog_sensitivity,
4486                       1);
4487 
4488     int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
4489     staticInfo.update(ANDROID_SENSOR_ORIENTATION,
4490                       &sensor_orientation,
4491                       1);
4492 
4493     int32_t max_output_streams[3] = {
4494             MAX_STALLING_STREAMS,
4495             MAX_PROCESSED_STREAMS,
4496             MAX_RAW_STREAMS};
4497     staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
4498                       max_output_streams,
4499                       3);
4500 
4501     uint8_t avail_leds = 0;
4502     staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
4503                       &avail_leds, 0);
4504 
4505     uint8_t focus_dist_calibrated;
4506     int32_t val = lookupFwkName(FOCUS_CALIBRATION_MAP,
4507             sizeof(FOCUS_CALIBRATION_MAP)/sizeof(FOCUS_CALIBRATION_MAP[0]),
4508             gCamCapability[cameraId]->focus_dist_calibrated);
4509     if (val != NAME_NOT_FOUND) {
4510         focus_dist_calibrated = (uint8_t)val;
4511         staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
4512                      &focus_dist_calibrated, 1);
4513     }
4514 
4515     int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
4516     size = 0;
4517     for (int i = 0; i < gCamCapability[cameraId]->supported_test_pattern_modes_cnt;
4518             i++) {
4519         int32_t val = lookupFwkName(TEST_PATTERN_MAP,
4520                                     sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
4521                                     gCamCapability[cameraId]->supported_test_pattern_modes[i]);
4522         if (val != NAME_NOT_FOUND) {
4523             avail_testpattern_modes[size] = val;
4524             size++;
4525         }
4526     }
4527     staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
4528                       avail_testpattern_modes,
4529                       size);
4530 
4531     uint8_t max_pipeline_depth = MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY;
4532     staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
4533                       &max_pipeline_depth,
4534                       1);
4535 
4536     int32_t partial_result_count = PARTIAL_RESULT_COUNT;
4537     staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
4538                       &partial_result_count,
4539                        1);
4540 
4541     uint8_t available_capabilities[MAX_AVAILABLE_CAPABILITIES];
4542     uint8_t available_capabilities_count = 0;
4543     available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE;
4544     available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR;
4545     available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING;
4546     if (facingBack) {
4547         available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW;
4548     }
4549     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
4550                       available_capabilities,
4551                       available_capabilities_count);
4552 
4553     int32_t max_input_streams = 0;
4554     staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
4555                       &max_input_streams,
4556                       1);
4557 
4558     int32_t io_format_map[] = {};
4559     staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
4560                       io_format_map, 0);
4561 
4562     int32_t max_latency = (facingBack)? ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL:ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
4563     staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
4564                       &max_latency,
4565                       1);
4566 
4567     float optical_axis_angle[2];
4568     optical_axis_angle[0] = 0; //need to verify
4569     optical_axis_angle[1] = 0; //need to verify
4570     staticInfo.update(ANDROID_LENS_OPTICAL_AXIS_ANGLE,
4571                       optical_axis_angle,
4572                       2);
4573 
4574     uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST};
4575     staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
4576                       available_hot_pixel_modes,
4577                       1);
4578 
4579     uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
4580                                       ANDROID_EDGE_MODE_FAST};
4581     staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
4582                       available_edge_modes,
4583                       2);
4584 
4585     uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
4586                                            ANDROID_NOISE_REDUCTION_MODE_FAST};
4587     staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
4588                       available_noise_red_modes,
4589                       2);
4590 
4591     uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
4592                                          ANDROID_TONEMAP_MODE_FAST};
4593     staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
4594                       available_tonemap_modes,
4595                       2);
4596 
4597     uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
4598     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
4599                       available_hot_pixel_map_modes,
4600                       1);
4601 
4602     int32_t avail_min_frame_durations_size = gCamCapability[cameraId]->picture_sizes_tbl_cnt *
4603                                                  sizeof(scalar_formats)/sizeof(int32_t) * 4;
4604     int64_t avail_min_frame_durations[avail_min_frame_durations_size];
4605     int pos = 0;
4606     for (int j = 0; j < scalar_formats_count; j++) {
4607         for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
4608            avail_min_frame_durations[pos]   = scalar_formats[j];
4609            avail_min_frame_durations[pos+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
4610            avail_min_frame_durations[pos+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
4611            avail_min_frame_durations[pos+3] = gCamCapability[cameraId]->picture_min_duration[i];
4612            pos+=4;
4613         }
4614     }
4615     staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
4616                       avail_min_frame_durations,
4617                       avail_min_frame_durations_size);
4618 
4619     uint8_t fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
4620         sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
4621         gCamCapability[cameraId]->reference_illuminant1);
4622     staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1,
4623                       &fwkReferenceIlluminant, 1);
4624 
4625     fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
4626         sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
4627         gCamCapability[cameraId]->reference_illuminant2);
4628     staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
4629                       &fwkReferenceIlluminant, 1);
4630 
4631     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1,
4632                       (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix1,
4633                       3*3);
4634 
4635     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2,
4636                       (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix2,
4637                       3*3);
4638 
4639     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1,
4640                    (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform1,
4641                       3*3);
4642 
4643     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2,
4644                    (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform2,
4645                       3*3);
4646 
4647     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
4648                    (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform1,
4649                       3*3);
4650 
4651     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2,
4652                    (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform2,
4653                       3*3);
4654 
4655     int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
4656        ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
4657        ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
4658        ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
4659        ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
4660        ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
4661        ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
4662        ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
4663        ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
4664        ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
4665        ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
4666        ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE, ANDROID_EDGE_STRENGTH,
4667        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
4668        ANDROID_JPEG_GPS_COORDINATES,
4669        ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
4670        ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
4671        ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
4672        ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
4673        ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
4674        ANDROID_NOISE_REDUCTION_STRENGTH, ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
4675        ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
4676        ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
4677        ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
4678        ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
4679        ANDROID_SHADING_STRENGTH, ANDROID_STATISTICS_FACE_DETECT_MODE,
4680        ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
4681        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
4682        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
4683        ANDROID_BLACK_LEVEL_LOCK };
4684 
4685     size_t request_keys_cnt =
4686             sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
4687     //NOTE: Please increase available_request_keys array size before
4688     //adding any new entries.
4689     int32_t available_request_keys[request_keys_cnt+1];
4690     memcpy(available_request_keys, request_keys_basic,
4691             sizeof(request_keys_basic));
4692     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
4693         available_request_keys[request_keys_cnt++] =
4694                 ANDROID_CONTROL_AF_REGIONS;
4695     }
4696     //NOTE: Please increase available_request_keys array size before
4697     //adding any new entries.
4698     staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
4699                       available_request_keys, request_keys_cnt);
4700 
4701     int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
4702        ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
4703        ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
4704        ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
4705        ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
4706        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
4707        ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
4708        ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
4709        ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
4710        ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
4711        ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
4712        ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
4713        ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
4714        ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
4715        ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
4716        ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
4717        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
4718        ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
4719        ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
4720        ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4721        ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_IDS,
4722        ANDROID_STATISTICS_FACE_LANDMARKS, ANDROID_STATISTICS_FACE_RECTANGLES,
4723        ANDROID_STATISTICS_FACE_SCORES};
4724     size_t result_keys_cnt =
4725             sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
4726     //NOTE: Please increase available_result_keys array size before
4727     //adding any new entries.
4728     int32_t available_result_keys[result_keys_cnt+3];
4729     memcpy(available_result_keys, result_keys_basic,
4730             sizeof(result_keys_basic));
4731     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
4732         available_result_keys[result_keys_cnt++] =
4733                 ANDROID_CONTROL_AF_REGIONS;
4734     }
4735     if (facingBack) {
4736        available_result_keys[result_keys_cnt++] = ANDROID_SENSOR_NOISE_PROFILE;
4737        available_result_keys[result_keys_cnt++] = ANDROID_SENSOR_GREEN_SPLIT;
4738     }
4739     //NOTE: Please increase available_result_keys array size before
4740     //adding any new entries.
4741 
4742     staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
4743                       available_result_keys, result_keys_cnt);
4744 
4745     int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
4746        ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
4747        ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
4748        ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
4749        ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
4750        ANDROID_SCALER_CROPPING_TYPE,
4751        ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
4752        ANDROID_SYNC_MAX_LATENCY,
4753        ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
4754        ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
4755        ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
4756        ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
4757        ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
4758        ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
4759        ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
4760        ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
4761        ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
4762        ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
4763        ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
4764        ANDROID_LENS_FACING, ANDROID_LENS_OPTICAL_AXIS_ANGLE,ANDROID_LENS_POSITION,
4765        ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
4766        ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
4767        ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
4768        ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
4769        ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
4770        ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
4771        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
4772        /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
4773        ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
4774        ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
4775        ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
4776        ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
4777        ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
4778        ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
4779        ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
4780        ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
4781        ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
4782        ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
4783        ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
4784        ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
4785        ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
4786        ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
4787        ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
4788        ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
4789        ANDROID_EDGE_AVAILABLE_EDGE_MODES,
4790        ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
4791        ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
4792        ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
4793        ANDROID_TONEMAP_MAX_CURVE_POINTS, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
4794     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
4795                       available_characteristics_keys,
4796                       sizeof(available_characteristics_keys)/sizeof(int32_t));
4797 
4798     /*available stall durations depend on the hw + sw and will be different for different devices */
4799     /*have to add for raw after implementation*/
4800     int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
4801     size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
4802 
4803     size_t available_stall_size = gCamCapability[cameraId]->picture_sizes_tbl_cnt * 4;
4804     int64_t available_stall_durations[available_stall_size];
4805     idx = 0;
4806     for (uint32_t j = 0; j < stall_formats_count; j++) {
4807        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
4808           for (uint32_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
4809              available_stall_durations[idx]   = stall_formats[j];
4810              available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
4811              available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
4812              available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
4813              idx+=4;
4814           }
4815        } else {
4816           for (uint32_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
4817              available_stall_durations[idx]   = stall_formats[j];
4818              available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
4819              available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
4820              available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
4821              idx+=4;
4822           }
4823        }
4824     }
4825     staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
4826                       available_stall_durations,
4827                       idx);
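    /* For illustration of the layout written above: each stall entry is a
     * (format, width, height, duration) quadruple with the duration in
     * nanoseconds, e.g. a hypothetical 4208x3120 JPEG size with a 280 ms stall
     * would appear as { HAL_PIXEL_FORMAT_BLOB, 4208, 3120, 280000000 }. The
     * real sizes and stall times come from gCamCapability[cameraId], so these
     * numbers are only assumed. */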
4828     //QCAMERA3_OPAQUE_RAW
4829     uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
4830     cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
4831     switch (gCamCapability[cameraId]->opaque_raw_fmt) {
4832     case LEGACY_RAW:
4833         if (gCamCapability[cameraId]->white_level == (1<<8)-1)
4834             fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
4835         else if (gCamCapability[cameraId]->white_level == (1<<10)-1)
4836             fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
4837         else if (gCamCapability[cameraId]->white_level == (1<<12)-1)
4838             fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
4839         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
4840         break;
4841     case MIPI_RAW:
4842         if (gCamCapability[cameraId]->white_level == (1<<8)-1)
4843             fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
4844         else if (gCamCapability[cameraId]->white_level == (1<<10)-1)
4845             fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
4846         else if (gCamCapability[cameraId]->white_level == (1<<12)-1)
4847             fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
4848         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
4849         break;
4850     default:
4851         ALOGE("%s: unknown opaque_raw_format %d", __func__,
4852                 gCamCapability[cameraId]->opaque_raw_fmt);
4853         break;
4854     }
4855     staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
4856 
4857     if (gCamCapability[cameraId]->supported_raw_dim_cnt) {
4858         int32_t strides[3*gCamCapability[cameraId]->supported_raw_dim_cnt];
4859         for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
4860             cam_stream_buf_plane_info_t buf_planes;
4861             strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
4862             strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
4863             mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
4864                 &gCamCapability[cameraId]->padding_info, &buf_planes);
4865             strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
4866         }
4867         staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
4868                 3*gCamCapability[cameraId]->supported_raw_dim_cnt);
4869     }
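    /* For illustration: QCAMERA3_OPAQUE_RAW_STRIDES holds one
     * (width, height, stride) triplet per supported raw dimension, with the
     * stride taken from mm_stream_calc_offset_raw() above; e.g. a hypothetical
     * 4208x3120 mode with a 5264-byte padded stride would be published as
     * { 4208, 3120, 5264 }. */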
4870     gStaticMetadata[cameraId] = staticInfo.release();
4871     return rc;
4872 }
4873 
4874 /*===========================================================================
4875  * FUNCTION   : makeTable
4876  *
4877  * DESCRIPTION: make a table of sizes
4878  *
4879  * PARAMETERS :
4880  *
4881  *
4882  *==========================================================================*/
4883 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
4884                                           int32_t* sizeTable)
4885 {
4886     int j = 0;
4887     for (int i = 0; i < size; i++) {
4888         sizeTable[j] = dimTable[i].width;
4889         sizeTable[j+1] = dimTable[i].height;
4890         j+=2;
4891     }
4892 }
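/* For illustration: given a hypothetical dimTable of {{1920,1080}, {1280,720}}
 * and size = 2, makeTable() fills sizeTable with {1920, 1080, 1280, 720}, i.e.
 * flattened (width, height) pairs in table order, which is the layout the
 * framework size tags expect. */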
4893 
4894 /*===========================================================================
4895  * FUNCTION   : makeFPSTable
4896  *
4897  * DESCRIPTION: make a table of fps ranges
4898  *
4899  * PARAMETERS :
4900  *
4901  *==========================================================================*/
4902 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
4903                                           int32_t* fpsRangesTable)
4904 {
4905     int j = 0;
4906     for (int i = 0; i < size; i++) {
4907         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
4908         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
4909         j+=2;
4910     }
4911 }
4912 
4913 /*===========================================================================
4914  * FUNCTION   : makeOverridesList
4915  *
4916  * DESCRIPTION: make a list of scene mode overrides
4917  *
4918  * PARAMETERS :
4919  *
4920  *
4921  *==========================================================================*/
4922 void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
4923                                                   uint8_t size, uint8_t* overridesList,
4924                                                   uint8_t* supported_indexes,
4925                                                   int camera_id)
4926 {
4927     /*daemon will give a list of overrides for all scene modes.
4928       However we should send the fwk only the overrides for the scene modes
4929       supported by the framework*/
4930     int j = 0, index = 0, supt = 0;
4931     uint8_t focus_override;
4932     for (int i = 0; i < size; i++) {
4933         supt = 0;
4934         index = supported_indexes[i];
4935         overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
4936         overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
4937                                  sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
4938                                                     overridesTable[index].awb_mode);
4939         focus_override = (uint8_t)overridesTable[index].af_mode;
4940         for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
4941            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
4942               supt = 1;
4943               break;
4944            }
4945         }
4946         if (supt) {
4947            overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
4948                                               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
4949                                               focus_override);
4950         } else {
4951            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
4952         }
4953         j+=3;
4954     }
4955 }
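/* For illustration: makeOverridesList() emits one (ae, awb, af) triplet per
 * supported scene mode, which is the layout ANDROID_CONTROL_SCENE_MODE_OVERRIDES
 * expects. A hypothetical entry could be
 *   { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, ANDROID_CONTROL_AWB_MODE_AUTO,
 *     ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE },
 * with the AF slot falling back to ANDROID_CONTROL_AF_MODE_OFF whenever the
 * daemon's override is not one of the supported focus modes, as the loop above
 * shows. */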
4956 
4957 /*===========================================================================
4958  * FUNCTION   : filterJpegSizes
4959  *
4960  * DESCRIPTION: Returns the supported jpeg sizes, keeping only sizes at least
4961  *              as large as the active array size divided by the downscale factor
4962  *
4963  * PARAMETERS :
4964  *
4965  * RETURN     : length of jpegSizes array
4966  *==========================================================================*/
4967 
4968 uint8_t QCamera3HardwareInterface::filterJpegSizes(int32_t* jpegSizes, int32_t* processedSizes,
4969                                                    uint8_t processedSizesCnt,
4970                                                    uint8_t maxCount,
4971                                                    cam_rect_t active_array_size,
4972                                                    uint8_t downscale_factor)
4973 {
4974    if (downscale_factor == 0) {
4975       downscale_factor = 1;
4976    }
4977     int32_t min_width = active_array_size.width / downscale_factor;
4978     int32_t min_height = active_array_size.height / downscale_factor;
4979     uint8_t jpegSizesCnt = 0;
4980     if (processedSizesCnt > maxCount) {
4981         processedSizesCnt = maxCount;
4982     }
4983     for (int i = 0; i < processedSizesCnt; i+=2) {
4984         if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
4985             jpegSizes[jpegSizesCnt] = processedSizes[i];
4986             jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
4987             jpegSizesCnt += 2;
4988         }
4989     }
4990     return jpegSizesCnt;
4991 }
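/* For illustration, with an assumed 4160x3120 active array and
 * downscale_factor = 2, min_width/min_height become 2080x1560 and every
 * processed (width, height) pair smaller than that is dropped; the return
 * value counts the surviving int32_t entries, i.e. two per kept size. */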
4992 
4993 /*===========================================================================
4994  * FUNCTION   : getScalarFormat
4995  *
4996  * DESCRIPTION: convert the format to type recognized by framework
4997  *
4998  * PARAMETERS : format : the format from backend
4999  *
5000  * RETURN     : format recognized by framework
5001  *
5002  *==========================================================================*/
5003 int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
5004 {
5005     int32_t halPixelFormat;
5006 
5007     switch (format) {
5008     case CAM_FORMAT_YUV_420_NV12:
5009         halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
5010         break;
5011     case CAM_FORMAT_YUV_420_NV21:
5012         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
5013         break;
5014     case CAM_FORMAT_YUV_420_NV21_ADRENO:
5015         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
5016         break;
5017     case CAM_FORMAT_YUV_420_YV12:
5018         halPixelFormat = HAL_PIXEL_FORMAT_YV12;
5019         break;
5020     case CAM_FORMAT_YUV_422_NV16:
5021     case CAM_FORMAT_YUV_422_NV61:
5022     default:
5023         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
5024         break;
5025     }
5026     return halPixelFormat;
5027 }
5028 /*===========================================================================
5029  * FUNCTION   : computeNoiseModelEntryS
5030  *
5031  * DESCRIPTION: function to map a given sensitivity to the S noise
5032  *              model parameters in the DNG noise model.
5033  *
5034  * PARAMETERS : sens : the sensor sensitivity
5035  *
5036  * RETURN     : S (sensor amplification) noise
5037  *
5038  *==========================================================================*/
5039 
5040 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
5041    double s = 3.738032e-06 * sens + 3.651935e-04;
5042    return s < 0.0 ? 0.0 : s;
5043 }
5044 
5045 /*===========================================================================
5046  * FUNCTION   : computeNoiseModelEntryO
5047  *
5048  * DESCRIPTION: function to map a given sensitivity to the O noise
5049  *              model parameters in the DNG noise model.
5050  *
5051  * PARAMETERS : sens : the sensor sensitivity
5052  *
5053  * RETURN     : O (sensor readout) noise
5054  *
5055  *==========================================================================*/
5056 
5057 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
5058   double o = 4.499952e-07 * sens + -2.968624e-04;
5059   return o < 0.0 ? 0.0 : o;
5060 }
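/* For illustration: the two helpers above give the per-sensitivity (S, O)
 * coefficients of the DNG noise model, where the noise variance of a pixel at
 * normalized signal level x is modeled as approximately S * x + O. A sketch of
 * how a single (S, O) pair could be derived (ISO value assumed):
 *
 *   double s = computeNoiseModelEntryS(400);   // ~1.86e-03
 *   double o = computeNoiseModelEntryO(400);   // ~-1.17e-04 before clamping, so 0.0
 *   double noise_profile_entry[2] = { s, o };
 *
 * The clamp to 0.0 in both helpers keeps the coefficients non-negative at low
 * sensitivities. */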
5061 
5062 /*===========================================================================
5063  * FUNCTION   : getSensorSensitivity
5064  *
5065  * DESCRIPTION: convert iso_mode to an integer value
5066  *
5067  * PARAMETERS : iso_mode : the iso_mode supported by sensor
5068  *
5069  * RETURN     : sensitivity supported by sensor
5070  *
5071  *==========================================================================*/
5072 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
5073 {
5074     int32_t sensitivity;
5075 
5076     switch (iso_mode) {
5077     case CAM_ISO_MODE_100:
5078         sensitivity = 100;
5079         break;
5080     case CAM_ISO_MODE_200:
5081         sensitivity = 200;
5082         break;
5083     case CAM_ISO_MODE_400:
5084         sensitivity = 400;
5085         break;
5086     case CAM_ISO_MODE_800:
5087         sensitivity = 800;
5088         break;
5089     case CAM_ISO_MODE_1600:
5090         sensitivity = 1600;
5091         break;
5092     default:
5093         sensitivity = -1;
5094         break;
5095     }
5096     return sensitivity;
5097 }
5098 
5099 /*===========================================================================
5100  * FUNCTION   : AddSetParmEntryToBatch
5101  *
5102  * DESCRIPTION: add set parameter entry into batch
5103  *
5104  * PARAMETERS :
5105  *   @p_table     : ptr to parameter buffer
5106  *   @paramType   : parameter type
5107  *   @paramLength : length of parameter value
5108  *   @paramValue  : ptr to parameter value
5109  *
5110  * RETURN     : int32_t type of status
5111  *              NO_ERROR  -- success
5112  *              non-zero failure code
5113  *==========================================================================*/
5114 int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
5115                                                           cam_intf_parm_type_t paramType,
5116                                                           uint32_t paramLength,
5117                                                           void *paramValue)
5118 {
5119     void* dst;
5120     if ((NULL == p_table) || (NULL == paramValue) ||
5121         (paramType >= CAM_INTF_PARM_MAX)) {
5122         ALOGE("%s: Invalid p_table: %p, paramValue: %p, param type: %d",
5123             __func__, p_table, paramValue, paramType);
5124         return BAD_VALUE;
5125     }
5126     /*************************************************************************
5127     *                   Copy contents into entry                             *
5128     *************************************************************************/
5129     if (paramLength > get_size_of(paramType)) {
5130         ALOGE("%s: input larger than max entry size, type=%d, length =%d",
5131                 __func__, paramType, paramLength);
5132         return BAD_VALUE;
5133     }
5134     dst = get_pointer_of(paramType, p_table);
5135     if(NULL != dst){
5136         memcpy(dst, paramValue, paramLength);
5137         p_table->is_valid[paramType] = 1;
5138     }
5139     return NO_ERROR;
5140 }
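/* For illustration, a typical call pattern, mirroring setFrameParameters()
 * further below (parameter and value shown are just one example):
 *
 *   int32_t hal_version = CAM_HAL_V3;
 *   rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
 *           sizeof(hal_version), &hal_version);
 *
 * The helper copies the value into the slot returned by get_pointer_of() and
 * sets is_valid[paramType], so the whole batch can later be handed to the
 * backend in one shot. */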
5141 
5142 /*===========================================================================
5143  * FUNCTION   : lookupFwkName
5144  *
5145  * DESCRIPTION: In case the enum is not the same in fwk and backend,
5146  *              make sure the parameter is correctly propagated
5147  *
5148  * PARAMETERS  :
5149  *   @arr      : map between the two enums
5150  *   @len      : len of the map
5151  *   @hal_name : name of the hal_parm to map
5152  *
5153  * RETURN     : int type of status
5154  *              fwk_name  -- success
5155  *              non-zero failure code
5156  *==========================================================================*/
5157 int32_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
5158                                              int len, int hal_name)
5159 {
5160 
5161     for (int i = 0; i < len; i++) {
5162         if (arr[i].hal_name == hal_name)
5163             return arr[i].fwk_name;
5164     }
5165 
5166     /* Not being able to find a matching framework type is not necessarily
5167      * an error case. This happens when mm-camera supports more attributes
5168      * than the framework does */
5169     CDBG_HIGH("%s: Cannot find matching framework type", __func__);
5170     return NAME_NOT_FOUND;
5171 }
5172 
5173 /*===========================================================================
5174  * FUNCTION   : lookupHalName
5175  *
5176  * DESCRIPTION: In case the enum is not the same in fwk and backend,
5177  *              make sure the parameter is correctly propagated
5178  *
5179  * PARAMETERS  :
5180  *   @arr      : map between the two enums
5181  *   @len      : len of the map
5182  *   @fwk_name : name of the framework parameter to map to its HAL equivalent
5183  *
5184  * RETURN     : int32_t type of status
5185  *              hal_name  -- success
5186  *              non-zero failure code
5187  *==========================================================================*/
5188 int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
5189                                              int len, unsigned int fwk_name)
5190 {
5191     for (int i = 0; i < len; i++) {
5192        if (arr[i].fwk_name == fwk_name)
5193            return arr[i].hal_name;
5194     }
5195     ALOGE("%s: Cannot find matching hal type", __func__);
5196     return NAME_NOT_FOUND;
5197 }
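/* For illustration: lookupFwkName() and lookupHalName() are inverse lookups
 * over the same QCameraMap tables. Assuming WHITE_BALANCE_MODES_MAP maps
 * ANDROID_CONTROL_AWB_MODE_AUTO to the backend auto-WB enum, a round trip
 * would be:
 *
 *   int8_t hal = lookupHalName(WHITE_BALANCE_MODES_MAP,
 *           sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
 *           ANDROID_CONTROL_AWB_MODE_AUTO);
 *   int32_t fwk = lookupFwkName(WHITE_BALANCE_MODES_MAP,
 *           sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
 *           hal);
 *
 * Both return NAME_NOT_FOUND when there is no match, but only lookupFwkName()
 * treats that as a non-error, since mm-camera may support more modes than the
 * framework exposes. */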
5198 
5199 /*===========================================================================
5200  * FUNCTION   : lookupProp
5201  *
5202  * DESCRIPTION: lookup a value by its name
5203  *
5204  * PARAMETERS :
5205  *   @attr    : map contains <name, value>
5206  *   @len     : size of the map
5207  *   @name    : name to be looked up
5208  *
5209  * RETURN     : Value if found
5210  *              CAM_CDS_MODE_MAX if not found
5211  *==========================================================================*/
5212 cam_cds_mode_type_t QCamera3HardwareInterface::lookupProp(const QCameraPropMap arr[],
5213         int len, const char *name)
5214 {
5215     if (name) {
5216         for (int i = 0; i < len; i++) {
5217             if (!strcmp(arr[i].desc, name)) {
5218                 return arr[i].val;
5219             }
5220         }
5221     }
5222     return CAM_CDS_MODE_MAX;
5223 }
5224 
5225 /*===========================================================================
5226  * FUNCTION   : getCamInfo
5227  *
5228  * DESCRIPTION: query camera capabilities
5229  *
5230  * PARAMETERS :
5231  *   @cameraId  : camera Id
5232  *   @info      : camera info struct to be filled in with camera capabilities
5233  *
5234  * RETURN     : int32_t type of status
5235  *              NO_ERROR  -- success
5236  *              non-zero failure code
5237  *==========================================================================*/
5238 int QCamera3HardwareInterface::getCamInfo(int cameraId,
5239                                     struct camera_info *info)
5240 {
5241     ATRACE_CALL();
5242     int rc = 0;
5243 
5244     if (NULL == gCamCapability[cameraId]) {
5245         rc = initCapabilities(cameraId);
5246         if (rc < 0) {
5247             //pthread_mutex_unlock(&g_camlock);
5248             return rc;
5249         }
5250     }
5251 
5252     if (NULL == gStaticMetadata[cameraId]) {
5253         rc = initStaticMetadata(cameraId);
5254         if (rc < 0) {
5255             return rc;
5256         }
5257     }
5258 
5259     switch(gCamCapability[cameraId]->position) {
5260     case CAM_POSITION_BACK:
5261         info->facing = CAMERA_FACING_BACK;
5262         break;
5263 
5264     case CAM_POSITION_FRONT:
5265         info->facing = CAMERA_FACING_FRONT;
5266         break;
5267 
5268     default:
5269         ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
5270         rc = -1;
5271         break;
5272     }
5273 
5274 
5275     info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
5276     info->device_version = CAMERA_DEVICE_API_VERSION_3_2;
5277     info->static_camera_characteristics = gStaticMetadata[cameraId];
5278 
5279     return rc;
5280 }
5281 
5282 /*===========================================================================
5283  * FUNCTION   : translateCapabilityToMetadata
5284  *
5285  * DESCRIPTION: translate the capability into camera_metadata_t
5286  *
5287  * PARAMETERS : type of the request
5288  *
5289  *
5290  * RETURN     : success: camera_metadata_t*
5291  *              failure: NULL
5292  *
5293  *==========================================================================*/
5294 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
5295 {
5296     pthread_mutex_lock(&mMutex);
5297 
5298     if (mDefaultMetadata[type] != NULL) {
5299         pthread_mutex_unlock(&mMutex);
5300         return mDefaultMetadata[type];
5301     }
5302     //first time we are handling this request
5303     //fill up the metadata structure using the wrapper class
5304     CameraMetadata settings;
5305     //translate from cam_capability_t to camera_metadata_tag_t
5306     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
5307     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
5308     int32_t defaultRequestID = 0;
5309     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
5310 
5311     /* OIS disable */
5312     char ois_prop[PROPERTY_VALUE_MAX];
5313     memset(ois_prop, 0, sizeof(ois_prop));
5314     property_get("persist.camera.ois.disable", ois_prop, "0");
5315     uint8_t ois_disable = atoi(ois_prop);
5316 
5317     /* OIS/EIS disable */
5318     char eis_prop[PROPERTY_VALUE_MAX];
5319     memset(eis_prop, 0, sizeof(eis_prop));
5320     property_get("camera.eis.enable", eis_prop, "0");
5321     mEisEnable = atoi(eis_prop);
5322 
5323     /* Force video to use OIS */
5324     char videoOisProp[PROPERTY_VALUE_MAX];
5325     memset(videoOisProp, 0, sizeof(videoOisProp));
5326     property_get("persist.camera.ois.video", videoOisProp, "1");
5327     uint8_t forceVideoOis = atoi(videoOisProp);
5328 
5329     uint8_t controlIntent = 0;
5330     uint8_t focusMode;
5331     uint8_t vsMode;
5332     uint8_t optStabMode;
5333     uint8_t cacMode;
5334     vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
5335     switch (type) {
5336       case CAMERA3_TEMPLATE_PREVIEW:
5337         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
5338         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
5339         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
5340         break;
5341       case CAMERA3_TEMPLATE_STILL_CAPTURE:
5342         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
5343         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
5344         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
5345         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
5346         settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
5347         break;
5348       case CAMERA3_TEMPLATE_VIDEO_RECORD:
5349         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
5350         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
5351         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
5352         if (forceVideoOis)
5353             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
5354         break;
5355       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
5356         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
5357         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
5358         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
5359         if (forceVideoOis)
5360             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
5361         break;
5362       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
5363         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
5364         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
5365         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
5366         break;
5367       case CAMERA3_TEMPLATE_MANUAL:
5368         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
5369         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
5370         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
5371         break;
5372       default:
5373         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
5374         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
5375         break;
5376     }
5377     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
5378     settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
5379     if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
5380         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
5381     }
5382     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
5383 
5384     if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
5385             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
5386         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
5387     else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
5388             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
5389             || ois_disable)
5390         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
5391     settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
5392 
5393     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
5394             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
5395 
5396     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
5397     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
5398 
5399     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
5400     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
5401 
5402     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
5403     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
5404 
5405     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
5406     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
5407 
5408     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
5409     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
5410 
5411     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
5412     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
5413 
5414     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
5415     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
5416 
5417     /*flash*/
5418     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
5419     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
5420 
5421     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
5422     settings.update(ANDROID_FLASH_FIRING_POWER,
5423             &flashFiringLevel, 1);
5424 
5425     /* lens */
5426     float default_aperture = gCamCapability[mCameraId]->apertures[0];
5427     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
5428 
5429     if (gCamCapability[mCameraId]->filter_densities_count) {
5430         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
5431         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
5432                         gCamCapability[mCameraId]->filter_densities_count);
5433     }
5434 
5435     float default_focal_length = gCamCapability[mCameraId]->focal_length;
5436     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
5437 
5438     float default_focus_distance = 0;
5439     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
5440 
5441     static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
5442     settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
5443 
5444     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
5445     settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
5446 
5447     static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
5448     settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
5449 
5450     static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_FULL;
5451     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
5452 
5453     static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
5454     settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
5455 
5456     static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
5457     settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
5458 
5459     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5460     settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5461 
5462     static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
5463     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
5464 
5465     /* Exposure time(Update the Min Exposure Time)*/
5466     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
5467     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
5468 
5469     /* frame duration */
5470     static const int64_t default_frame_duration = NSEC_PER_33MSEC;
5471     settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
5472 
5473     /* sensitivity */
5474     static const int32_t default_sensitivity = 100;
5475     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
5476 
5477     /*edge mode*/
5478     static const uint8_t edge_mode = ANDROID_EDGE_MODE_FAST;
5479     settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
5480 
5481     /*noise reduction mode*/
5482     static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
5483     settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
5484 
5485     /*color correction mode*/
5486     static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
5487     settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
5488 
5489     /*transform matrix mode*/
5490     static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
5491     settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
5492 
5493     uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
5494     settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
5495 
5496     int32_t scaler_crop_region[4];
5497     scaler_crop_region[0] = 0;
5498     scaler_crop_region[1] = 0;
5499     scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
5500     scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
5501     settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
5502 
5503     static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
5504     settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
5505 
5506     /*focus distance*/
5507     float focus_distance = 0.0;
5508     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
5509 
5510     /*target fps range: use maximum range for picture, and maximum fixed range for video*/
5511     float max_range = 0.0;
5512     float max_fixed_fps = 0.0;
5513     int32_t fps_range[2] = {0, 0};
5514     for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
5515             i++) {
5516         float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
5517             gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
5518         if (type == CAMERA3_TEMPLATE_PREVIEW ||
5519                 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
5520                 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
5521             if (range > max_range) {
5522                 fps_range[0] =
5523                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
5524                 fps_range[1] =
5525                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
5526                 max_range = range;
5527             }
5528         } else {
5529             if (range < 0.01 && max_fixed_fps <
5530                     gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
5531                 fps_range[0] =
5532                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
5533                 fps_range[1] =
5534                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
5535                 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
5536             }
5537         }
5538     }
5539     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
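    /* For illustration, assuming a fps_ranges_tbl of {[15,30], [30,30], [7.5,60]}:
     * the preview/still/ZSL templates would pick [7.5,60] (the widest range),
     * while the video templates would pick [30,30] (the highest fixed-fps
     * range), which is what the loop above encodes. */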
5540 
5541     /*precapture trigger*/
5542     uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
5543     settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
5544 
5545     /*af trigger*/
5546     uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
5547     settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
5548 
5549     /* ae & af regions */
5550     int32_t active_region[] = {
5551             gCamCapability[mCameraId]->active_array_size.left,
5552             gCamCapability[mCameraId]->active_array_size.top,
5553             gCamCapability[mCameraId]->active_array_size.left +
5554                     gCamCapability[mCameraId]->active_array_size.width,
5555             gCamCapability[mCameraId]->active_array_size.top +
5556                     gCamCapability[mCameraId]->active_array_size.height,
5557             0};
5558     settings.update(ANDROID_CONTROL_AE_REGIONS, active_region, 5);
5559     settings.update(ANDROID_CONTROL_AF_REGIONS, active_region, 5);
5560 
5561     /* black level lock */
5562     uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
5563     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
5564 
5565     /* face detect mode */
5566     uint8_t facedetect_mode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
5567     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &facedetect_mode, 1);
5568 
5569     /* lens shading map mode */
5570     uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
5571     if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type &&
5572         gCamCapability[mCameraId]->supported_raw_dim_cnt > 0) {
5573         shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
5574     }
5575     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
5576 
5577     //special defaults for manual template
5578     if (type == CAMERA3_TEMPLATE_MANUAL) {
5579         static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
5580         settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
5581 
5582         static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
5583         settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
5584 
5585         static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
5586         settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
5587 
5588         static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
5589         settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
5590 
5591         static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
5592         settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
5593 
5594         static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
5595         settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
5596     }
5597 
5598     /* CDS default */
5599     char prop[PROPERTY_VALUE_MAX];
5600     memset(prop, 0, sizeof(prop));
5601     property_get("persist.camera.CDS", prop, "Auto");
5602     cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
5603     cds_mode = lookupProp(CDS_MAP, sizeof(CDS_MAP)/sizeof(QCameraPropMap), prop);
5604     if (CAM_CDS_MODE_MAX == cds_mode) {
5605         cds_mode = CAM_CDS_MODE_AUTO;
5606     }
5607     int32_t mode = cds_mode;
5608     settings.update(QCAMERA3_CDS_MODE, &mode, 1);
5609 
5610     mDefaultMetadata[type] = settings.release();
5611 
5612     pthread_mutex_unlock(&mMutex);
5613     return mDefaultMetadata[type];
5614 }
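/* For illustration: the default settings built above are cached per template
 * type in mDefaultMetadata[], so a repeated request for the same template
 * (e.g. the framework's construct_default_request_settings() call for
 * CAMERA3_TEMPLATE_PREVIEW, assuming that is the entry point that lands here)
 * returns the already-built camera_metadata_t without re-deriving anything. */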
5615 
5616 /*===========================================================================
5617  * FUNCTION   : setFrameParameters
5618  *
5619  * DESCRIPTION: set parameters per frame as requested in the metadata from
5620  *              framework
5621  *
5622  * PARAMETERS :
5623  *   @request   : request that needs to be serviced
5624  *   @streamID : Stream ID of all the requested streams
5625  *
5626  * RETURN     : success: NO_ERROR
5627  *              failure:
5628  *==========================================================================*/
5629 int QCamera3HardwareInterface::setFrameParameters(
5630                     camera3_capture_request_t *request,
5631                     cam_stream_ID_t streamID,
5632                     uint32_t snapshotStreamId)
5633 {
5634     /*translate from camera_metadata_t type to parm_type_t*/
5635     int rc = 0;
5636     int32_t hal_version = CAM_HAL_V3;
5637 
5638     memset(mParameters, 0, sizeof(parm_buffer_t));
5639     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
5640                 sizeof(hal_version), &hal_version);
5641     if (rc < 0) {
5642         ALOGE("%s: Failed to set hal version in the parameters", __func__);
5643         return BAD_VALUE;
5644     }
5645 
5646     /*we need to update the frame number in the parameters*/
5647     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
5648                                 sizeof(request->frame_number), &(request->frame_number));
5649     if (rc < 0) {
5650         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
5651         return BAD_VALUE;
5652     }
5653 
5654     /* Update stream id of all the requested buffers */
5655     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_ID,
5656                                 sizeof(cam_stream_ID_t), &streamID);
5657 
5658     if (rc < 0) {
5659         ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
5660         return BAD_VALUE;
5661     }
5662 
5663     if(request->settings != NULL){
5664         rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
5665     }
5666 
5667     return rc;
5668 }
5669 
5670 /*===========================================================================
5671  * FUNCTION   : setReprocParameters
5672  *
5673  * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
5674  *              return it.
5675  *
5676  * PARAMETERS :
5677  *   @request   : request that needs to be serviced
5678  *
5679  * RETURN     : success: NO_ERROR
5680  *              failure:
5681  *==========================================================================*/
5682 int32_t QCamera3HardwareInterface::setReprocParameters(
5683         camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
5684         uint32_t snapshotStreamId)
5685 {
5686     /*translate from camera_metadata_t type to parm_type_t*/
5687     int rc = 0;
5688 
5689     if (NULL == request->settings){
5690         ALOGE("%s: Reprocess settings cannot be NULL", __func__);
5691         return BAD_VALUE;
5692     }
5693 
5694     if (NULL == reprocParam) {
5695         ALOGE("%s: Invalid reprocessing metadata buffer", __func__);
5696         return BAD_VALUE;
5697     }
5698     memset(reprocParam, 0, sizeof(metadata_buffer_t));
5699 
5700     /*we need to update the frame number in the parameters*/
5701     rc = AddSetParmEntryToBatch(reprocParam, CAM_INTF_META_FRAME_NUMBER,
5702                                 sizeof(request->frame_number), &(request->frame_number));
5703     if (rc < 0) {
5704         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
5705         return rc;
5706     }
5707 
5708     rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
5709     if (rc < 0) {
5710         ALOGE("%s: Failed to translate reproc request", __func__);
5711         return rc;
5712     }
5713 
5714     CameraMetadata frame_settings;
5715     frame_settings = request->settings;
5716     if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
5717             frame_settings.exists(QCAMERA3_CROP_REPROCESS) &&
5718             frame_settings.exists(QCAMERA3_CROP_STREAM_ID_REPROCESS)) {
5719         int32_t *crop_count =
5720                 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
5721         int32_t *crop_data =
5722                 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
5723         int32_t *crop_stream_ids =
5724                 frame_settings.find(QCAMERA3_CROP_STREAM_ID_REPROCESS).data.i32;
5725         if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
5726             bool found = false;
5727             int32_t i;
5728             for (i = 0; i < *crop_count; i++) {
5729                 if (crop_stream_ids[i] == (int32_t) request->input_buffer->stream) {
5730                     found = true;
5731                     break;
5732                 }
5733             }
5734 
5735             if (found) {
5736                 cam_crop_data_t crop_meta;
5737                 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
5738                 crop_meta.num_of_streams = 1;
5739                 crop_meta.crop_info[0].crop.left   = crop_data[i*4];
5740                 crop_meta.crop_info[0].crop.top    = crop_data[i*4 + 1];
5741                 crop_meta.crop_info[0].crop.width  = crop_data[i*4 + 2];
5742                 crop_meta.crop_info[0].crop.height = crop_data[i*4 + 3];
5743                 rc = AddSetParmEntryToBatch(reprocParam,
5744                         CAM_INTF_META_CROP_DATA,
5745                         sizeof(cam_crop_data_t), &crop_meta);
5746                 CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d",
5747                         __func__,
5748                         request->input_buffer->stream,
5749                         crop_meta.crop_info[0].crop.left,
5750                         crop_meta.crop_info[0].crop.top,
5751                         crop_meta.crop_info[0].crop.width,
5752                         crop_meta.crop_info[0].crop.height);
5753             } else {
5754                 ALOGE("%s: No matching reprocess input stream found!", __func__);
5755             }
5756         } else {
5757             ALOGE("%s: Invalid reprocess crop count %d!", __func__, *crop_count);
5758         }
5759     }
5760 
5761     return rc;
5762 }
5763 
5764 /*===========================================================================
5765  * FUNCTION   : translateToHalMetadata
5766  *
5767  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
5768  *
5769  *
5770  * PARAMETERS :
5771  *   @request  : request sent from framework
5772  *
5773  *
5774  * RETURN     : success: NO_ERROR
5775  *              failure:
5776  *==========================================================================*/
5777 int QCamera3HardwareInterface::translateToHalMetadata
5778                                   (const camera3_capture_request_t *request,
5779                                    metadata_buffer_t *hal_metadata,
5780                                    uint32_t snapshotStreamId)
5781 {
5782     int rc = 0;
5783     CameraMetadata frame_settings;
5784     frame_settings = request->settings;
5785 
5786     /* Do not change the order of the following list unless you know what you are
5787      * doing.
5788      * The order is laid out in such a way that parameters in the front of the table
5789      * may be used to override the parameters later in the table. Examples are:
5790      * 1. META_MODE should precede AEC/AWB/AF MODE
5791  *      2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
5792      * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
5793  *      4. Any mode should precede its corresponding settings
5794      */
5795     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
5796         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
5797         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_MODE,
5798                 sizeof(metaMode), &metaMode);
5799         if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
5800            camera_metadata_entry entry = frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
5801            if (0 < entry.count) {
5802                uint8_t fwk_sceneMode = entry.data.u8[0];
5803                uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
5804                                                  sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
5805                                                  fwk_sceneMode);
5806                rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
5807                     sizeof(sceneMode), &sceneMode);
5808            }
5809         } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
5810            uint8_t sceneMode = CAM_SCENE_MODE_OFF;
5811            rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
5812                 sizeof(sceneMode), &sceneMode);
5813         } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
5814            uint8_t sceneMode = CAM_SCENE_MODE_OFF;
5815            rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
5816                 sizeof(sceneMode), &sceneMode);
5817         }
5818     }
5819 
5820     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
5821         uint8_t fwk_aeMode =
5822             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
5823         uint8_t aeMode;
5824         int32_t redeye;
5825 
5826         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
5827             aeMode = CAM_AE_MODE_OFF;
5828         } else {
5829             aeMode = CAM_AE_MODE_ON;
5830         }
5831         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
5832             redeye = 1;
5833         } else {
5834             redeye = 0;
5835         }
5836 
5837         int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
5838                                           sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),
5839                                           fwk_aeMode);
5840         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_AEC_MODE,
5841                 sizeof(aeMode), &aeMode);
5842         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
5843                 sizeof(flashMode), &flashMode);
5844         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION,
5845                 sizeof(redeye), &redeye);
5846     }
5847 
5848     if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
5849         uint8_t fwk_whiteLevel =
5850             frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
5851         uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
5852                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
5853                 fwk_whiteLevel);
5854         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE,
5855                 sizeof(whiteLevel), &whiteLevel);
5856     }
5857 
5858     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5859         uint8_t fwk_cacMode =
5860                 frame_settings.find(
5861                         ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5862         int8_t val = lookupHalName(COLOR_ABERRATION_MAP,
5863                 sizeof(COLOR_ABERRATION_MAP)/sizeof(COLOR_ABERRATION_MAP[0]),
5864                 fwk_cacMode);
5865         if (NAME_NOT_FOUND != val) {
5866             cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
5867             rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_CAC,
5868                     sizeof(cacMode), &cacMode);
5869         } else {
5870             ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
5871         }
5872     }
5873 
5874     if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
5875         uint8_t fwk_focusMode =
5876             frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
5877         uint8_t focusMode;
5878         focusMode = lookupHalName(FOCUS_MODES_MAP,
5879                                    sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
5880                                    fwk_focusMode);
5881         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_FOCUS_MODE,
5882                 sizeof(focusMode), &focusMode);
5883     }
5884 
5885     if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
5886         float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
5887         rc = AddSetParmEntryToBatch(hal_metadata,
5888                 CAM_INTF_META_LENS_FOCUS_DISTANCE,
5889                 sizeof(focalDistance), &focalDistance);
5890     }
5891 
5892     if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
5893         uint8_t fwk_antibandingMode =
5894             frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
5895         int32_t hal_antibandingMode = lookupHalName(ANTIBANDING_MODES_MAP,
5896                      sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
5897                      fwk_antibandingMode);
5898         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
5899                 sizeof(hal_antibandingMode), &hal_antibandingMode);
5900     }
5901 
5902     if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
5903         int32_t expCompensation = frame_settings.find(
5904             ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
5905         if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
5906             expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
5907         if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
5908             expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
5909         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
5910           sizeof(expCompensation), &expCompensation);
5911     }
5912 
5913     if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
5914         uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
5915         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_AEC_LOCK,
5916                 sizeof(aeLock), &aeLock);
5917     }
5918     if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5919         cam_fps_range_t fps_range;
5920         fps_range.min_fps =
5921             frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
5922         fps_range.max_fps =
5923             frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
5924         fps_range.video_min_fps = fps_range.min_fps;
5925         fps_range.video_max_fps = fps_range.max_fps;
5926         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_FPS_RANGE,
5927                 sizeof(fps_range), &fps_range);
5928     }
5929 
5930     if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
5931         uint8_t awbLock =
5932             frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
5933         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_AWB_LOCK,
5934                 sizeof(awbLock), &awbLock);
5935     }
5936 
5937     if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
5938         uint8_t fwk_effectMode =
5939             frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
5940         uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
5941                 sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
5942                 fwk_effectMode);
5943         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_EFFECT,
5944                 sizeof(effectMode), &effectMode);
5945     }
5946 
5947     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
5948         uint8_t colorCorrectMode =
5949             frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
5950         rc =
5951             AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
5952                     sizeof(colorCorrectMode), &colorCorrectMode);
5953     }
5954 
5955     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
5956         cam_color_correct_gains_t colorCorrectGains;
5957         for (int i = 0; i < 4; i++) {
5958             colorCorrectGains.gains[i] =
5959                 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
5960         }
5961         rc =
5962             AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
5963                     sizeof(colorCorrectGains), &colorCorrectGains);
5964     }
5965 
5966     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
5967         cam_color_correct_matrix_t colorCorrectTransform;
5968         cam_rational_type_t transform_elem;
5969         int num = 0;
5970         for (int i = 0; i < 3; i++) {
5971            for (int j = 0; j < 3; j++) {
5972               transform_elem.numerator =
5973                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
5974               transform_elem.denominator =
5975                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
5976               colorCorrectTransform.transform_matrix[i][j] = transform_elem;
5977               num++;
5978            }
5979         }
5980         rc =
5981             AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
5982                     sizeof(colorCorrectTransform), &colorCorrectTransform);
5983     }
5984 
5985     cam_trigger_t aecTrigger;
5986     aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
5987     aecTrigger.trigger_id = -1;
5988     if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
5989         frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
5990         aecTrigger.trigger =
5991             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
5992         aecTrigger.trigger_id =
5993             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
5994         rc = AddSetParmEntryToBatch(hal_metadata,
5995                 CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
5996                 sizeof(aecTrigger), &aecTrigger);
5997     }
5998     /*af_trigger must come with a trigger id*/
5999     if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
6000         frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
6001         cam_trigger_t af_trigger;
6002         af_trigger.trigger =
6003             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
6004         af_trigger.trigger_id =
6005             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
6006         rc = AddSetParmEntryToBatch(hal_metadata,
6007                 CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
6008     }
6009 
6010     if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
6011         int32_t demosaic =
6012             frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
6013         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_DEMOSAIC,
6014                 sizeof(demosaic), &demosaic);
6015     }
6016 
6017     if (frame_settings.exists(ANDROID_EDGE_MODE)) {
6018         cam_edge_application_t edge_application;
6019         edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
6020         if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
6021             edge_application.sharpness = 0;
6022         } else {
6023             if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
6024                 uint8_t edgeStrength =
6025                     frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
6026                 edge_application.sharpness = (int32_t)edgeStrength;
6027             } else {
6028                 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
6029             }
6030         }
6031         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_EDGE_MODE,
6032                 sizeof(edge_application), &edge_application);
6033     }
6034 
6035     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
6036         int32_t respectFlashMode = 1;
6037         if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
6038             uint8_t fwk_aeMode =
6039                 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
6040             if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
6041                 respectFlashMode = 0;
6042                 CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
6043                     __func__);
6044             }
6045         }
6046         if (respectFlashMode) {
6047             uint8_t flashMode =
6048                 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
6049             flashMode = (uint8_t)lookupHalName(FLASH_MODES_MAP,
6050                                           sizeof(FLASH_MODES_MAP),
6051                                           flashMode);
6052             CDBG_HIGH("%s: flash mode after mapping %d", __func__, flashMode);
6053             // To check: CAM_INTF_META_FLASH_MODE usage
6054             rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
6055                           sizeof(flashMode), &flashMode);
6056         }
6057     }
6058 
6059     if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
6060         uint8_t flashPower =
6061             frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
6062         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_FLASH_POWER,
6063                 sizeof(flashPower), &flashPower);
6064     }
6065 
6066     if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
6067         int64_t flashFiringTime =
6068             frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
6069         rc = AddSetParmEntryToBatch(hal_metadata,
6070                 CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
6071     }
6072 
6073     if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
6074         uint8_t hotPixelMode =
6075             frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
6076         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
6077                 sizeof(hotPixelMode), &hotPixelMode);
6078     }
6079 
6080     if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
6081         float lensAperture =
6082             frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
6083         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_LENS_APERTURE,
6084                 sizeof(lensAperture), &lensAperture);
6085     }
6086 
6087     if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
6088         float filterDensity =
6089             frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
6090         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
6091                 sizeof(filterDensity), &filterDensity);
6092     }
6093 
6094     if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
6095         float focalLength =
6096             frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
6097         rc = AddSetParmEntryToBatch(hal_metadata,
6098                 CAM_INTF_META_LENS_FOCAL_LENGTH,
6099                 sizeof(focalLength), &focalLength);
6100     }
6101 
6102     if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
6103         uint8_t optStabMode =
6104             frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
6105         rc = AddSetParmEntryToBatch(hal_metadata,
6106                 CAM_INTF_META_LENS_OPT_STAB_MODE,
6107                 sizeof(optStabMode), &optStabMode);
6108     }
6109 
6110     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
6111         uint8_t noiseRedMode =
6112             frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
6113         rc = AddSetParmEntryToBatch(hal_metadata,
6114                 CAM_INTF_META_NOISE_REDUCTION_MODE,
6115                 sizeof(noiseRedMode), &noiseRedMode);
6116     }
6117 
6118     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
6119         uint8_t noiseRedStrength =
6120             frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
6121         rc = AddSetParmEntryToBatch(hal_metadata,
6122                 CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
6123                 sizeof(noiseRedStrength), &noiseRedStrength);
6124     }
6125 
6126     cam_crop_region_t scalerCropRegion;
6127     bool scalerCropSet = false;
6128     if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
6129         scalerCropRegion.left =
6130             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
6131         scalerCropRegion.top =
6132             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
6133         scalerCropRegion.width =
6134             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
6135         scalerCropRegion.height =
6136             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
6137         rc = AddSetParmEntryToBatch(hal_metadata,
6138                 CAM_INTF_META_SCALER_CROP_REGION,
6139                 sizeof(scalerCropRegion), &scalerCropRegion);
6140         scalerCropSet = true;
6141     }
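    // Note: the crop region above is cached in scalerCropRegion/scalerCropSet so
    // that the AE/AF region handling further below can clip its ROIs against the
    // active crop window via resetIfNeededROI().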
6142 
6143     if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
6144         int64_t sensorExpTime =
6145             frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
6146         CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
6147         rc = AddSetParmEntryToBatch(hal_metadata,
6148                 CAM_INTF_META_SENSOR_EXPOSURE_TIME,
6149                 sizeof(sensorExpTime), &sensorExpTime);
6150     }
6151 
6152     if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
6153         int64_t sensorFrameDuration =
6154             frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
6155         int64_t minFrameDuration = getMinFrameDuration(request);
6156         sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
6157         if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
6158             sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
6159         CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
6160         rc = AddSetParmEntryToBatch(hal_metadata,
6161                 CAM_INTF_META_SENSOR_FRAME_DURATION,
6162                 sizeof(sensorFrameDuration), &sensorFrameDuration);
6163     }
6164 
6165     if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
6166         int32_t sensorSensitivity =
6167             frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
6168         if (sensorSensitivity <
6169                 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
6170             sensorSensitivity =
6171                 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
6172         if (sensorSensitivity >
6173                 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
6174             sensorSensitivity =
6175                 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
6176         CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
6177         rc = AddSetParmEntryToBatch(hal_metadata,
6178                 CAM_INTF_META_SENSOR_SENSITIVITY,
6179                 sizeof(sensorSensitivity), &sensorSensitivity);
6180     }
6181 
6182     if (frame_settings.exists(ANDROID_SHADING_MODE)) {
6183         uint8_t shadingMode =
6184             frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
6185         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_MODE,
6186                 sizeof(shadingMode), &shadingMode);
6187     }
6188 
6189     if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
6190         uint8_t shadingStrength =
6191             frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
6192         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_STRENGTH,
6193                 sizeof(shadingStrength), &shadingStrength);
6194     }
6195 
6196     if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
6197         uint8_t fwk_facedetectMode =
6198             frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
6199         uint8_t facedetectMode =
6200             lookupHalName(FACEDETECT_MODES_MAP,
6201                 sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
6202         rc = AddSetParmEntryToBatch(hal_metadata,
6203                 CAM_INTF_META_STATS_FACEDETECT_MODE,
6204                 sizeof(facedetectMode), &facedetectMode);
6205     }
6206 
6207     if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
6208         uint8_t histogramMode =
6209             frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
6210         rc = AddSetParmEntryToBatch(hal_metadata,
6211                 CAM_INTF_META_STATS_HISTOGRAM_MODE,
6212                 sizeof(histogramMode), &histogramMode);
6213     }
6214 
6215     if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
6216         uint8_t sharpnessMapMode =
6217             frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
6218         rc = AddSetParmEntryToBatch(hal_metadata,
6219                 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
6220                 sizeof(sharpnessMapMode), &sharpnessMapMode);
6221     }
6222 
6223     if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
6224         uint8_t tonemapMode =
6225             frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
6226         rc = AddSetParmEntryToBatch(hal_metadata,
6227                 CAM_INTF_META_TONEMAP_MODE,
6228                 sizeof(tonemapMode), &tonemapMode);
6229     }
6230     /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
6231     /*All tonemap channels will have the same number of points*/
6232     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
6233         frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
6234         frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
6235         cam_rgb_tonemap_curves tonemapCurves;
6236         tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
6237         if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6238             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
6239                     __func__, tonemapCurves.tonemap_points_cnt,
6240                     CAM_MAX_TONEMAP_CURVE_SIZE);
6241             tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6242         }
6243 
6244         /* ch0 = G*/
6245         int point = 0;
6246         cam_tonemap_curve_t tonemapCurveGreen;
6247         for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
6248             for (int j = 0; j < 2; j++) {
6249                tonemapCurveGreen.tonemap_points[i][j] =
6250                   frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
6251                point++;
6252             }
6253         }
6254         tonemapCurves.curves[0] = tonemapCurveGreen;
6255 
6256         /* ch 1 = B */
6257         point = 0;
6258         cam_tonemap_curve_t tonemapCurveBlue;
6259         for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
6260             for (int j = 0; j < 2; j++) {
6261                tonemapCurveBlue.tonemap_points[i][j] =
6262                   frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
6263                point++;
6264             }
6265         }
6266         tonemapCurves.curves[1] = tonemapCurveBlue;
6267 
6268         /* ch 2 = R */
6269         point = 0;
6270         cam_tonemap_curve_t tonemapCurveRed;
6271         for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
6272             for (int j = 0; j < 2; j++) {
6273                tonemapCurveRed.tonemap_points[i][j] =
6274                   frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
6275                point++;
6276             }
6277         }
6278         tonemapCurves.curves[2] = tonemapCurveRed;
6279 
6280         rc = AddSetParmEntryToBatch(hal_metadata,
6281                 CAM_INTF_META_TONEMAP_CURVES,
6282                 sizeof(tonemapCurves), &tonemapCurves);
6283     }
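
    /* Note on the tonemap packing above: each ANDROID_TONEMAP_CURVE_* entry is an
     * interleaved list of (Pin, Pout) control points, which is why the point count
     * is count/2 and the inner j-loop copies an input/output pair per point.
     * Illustrative framework-side sketch of the layout consumed here (the values
     * and the 'settings' object are hypothetical, not part of this HAL):
     *
     *   float linear_curve[] = {0.0f, 0.0f, 1.0f, 1.0f};   // two (Pin, Pout) points
     *   settings.update(ANDROID_TONEMAP_CURVE_GREEN, linear_curve, 4);
     *   settings.update(ANDROID_TONEMAP_CURVE_BLUE,  linear_curve, 4);
     *   settings.update(ANDROID_TONEMAP_CURVE_RED,   linear_curve, 4);
     */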
6284 
6285     if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
6286         uint8_t captureIntent =
6287             frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
6288         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
6289                 sizeof(captureIntent), &captureIntent);
6290     }
6291 
6292     if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
6293         uint8_t blackLevelLock =
6294             frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
6295         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
6296                 sizeof(blackLevelLock), &blackLevelLock);
6297     }
6298 
6299     if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
6300         uint8_t lensShadingMapMode =
6301             frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
6302         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
6303                 sizeof(lensShadingMapMode), &lensShadingMapMode);
6304     }
6305 
6306     if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
6307         cam_area_t roi;
6308         bool reset = true;
6309         convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
6310         if (scalerCropSet) {
6311             reset = resetIfNeededROI(&roi, &scalerCropRegion);
6312         }
6313         if (reset) {
6314             rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_AEC_ROI,
6315                     sizeof(roi), &roi);
6316         }
6317     }
6318 
6319     if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
6320         cam_area_t roi;
6321         bool reset = true;
6322         convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
6323         if (scalerCropSet) {
6324             reset = resetIfNeededROI(&roi, &scalerCropRegion);
6325         }
6326         if (reset) {
6327             rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_AF_ROI,
6328                     sizeof(roi), &roi);
6329         }
6330     }
6331 
6332     // CDS
6333     if (frame_settings.exists(QCAMERA3_CDS_MODE)) {
6334         int32_t* cds =
6335             frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
6336         if ((CAM_CDS_MODE_MAX <= (*cds)) || (0 > (*cds))) {
6337             ALOGE("%s: Invalid CDS mode %d!", __func__, *cds);
6338         } else {
6339             cam_cds_mode_type_t mode = (cam_cds_mode_type_t) *cds;
6340             rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_CDS_MODE,
6341                 sizeof(mode), &mode);
6342         }
6343     }
6344 
6345     if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
6346         cam_test_pattern_data_t testPatternData;
6347         uint32_t fwk_testPatternMode = frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
6348         uint8_t testPatternMode = lookupHalName(TEST_PATTERN_MAP,
6349                sizeof(TEST_PATTERN_MAP), fwk_testPatternMode);
6350 
6351         memset(&testPatternData, 0, sizeof(testPatternData));
6352         testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
6353         if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
6354                 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
6355             int32_t* fwk_testPatternData = frame_settings.find(
6356                     ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
6357             testPatternData.r = fwk_testPatternData[0];
6358             testPatternData.b = fwk_testPatternData[3];
6359             switch (gCamCapability[mCameraId]->color_arrangement) {
6360             case CAM_FILTER_ARRANGEMENT_RGGB:
6361             case CAM_FILTER_ARRANGEMENT_GRBG:
6362                 testPatternData.gr = fwk_testPatternData[1];
6363                 testPatternData.gb = fwk_testPatternData[2];
6364                 break;
6365             case CAM_FILTER_ARRANGEMENT_GBRG:
6366             case CAM_FILTER_ARRANGEMENT_BGGR:
6367                 testPatternData.gr = fwk_testPatternData[2];
6368                 testPatternData.gb = fwk_testPatternData[1];
6369                 break;
6370             default:
6371                 ALOGE("%s: color arrangement %d is not supported", __func__,
6372                     gCamCapability[mCameraId]->color_arrangement);
6373                 break;
6374             }
6375         }
6376         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
6377             sizeof(testPatternData), &testPatternData);
6378     }
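
    /* Note on the solid-color test pattern handling above: the framework supplies
     * four per-channel values (nominally [R, G-even, G-odd, B]); the switch on the
     * sensor's color filter arrangement decides which of the two green entries maps
     * to Gr and which to Gb, so the pattern lands on the correct CFA channels. */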
6379 
6380     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
6381         double *gps_coords =
6382             frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d;
6383         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES, sizeof(double)*3, gps_coords);
6384     }
6385 
6386     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
6387         char gps_methods[GPS_PROCESSING_METHOD_SIZE];
6388         const char *gps_methods_src = (const char *)
6389                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
6390         uint32_t count = frame_settings.find(
6391                 ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
6392         memset(gps_methods, '\0', sizeof(gps_methods));
6393         strncpy(gps_methods, gps_methods_src, sizeof(gps_methods)-1);
6394         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS, sizeof(gps_methods), gps_methods);
6395     }
6396 
6397     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
6398         int64_t gps_timestamp =
6399             frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
6400         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP, sizeof(int64_t), &gps_timestamp);
6401     }
6402 
6403     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
6404         int32_t orientation =
6405             frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
6406         cam_rotation_info_t rotation_info;
6407         if (orientation == 0) {
6408            rotation_info.rotation = ROTATE_0;
6409         } else if (orientation == 90) {
6410            rotation_info.rotation = ROTATE_90;
6411         } else if (orientation == 180) {
6412            rotation_info.rotation = ROTATE_180;
6413         } else if (orientation == 270) {
6414            rotation_info.rotation = ROTATE_270;
6415         } else {
            rotation_info.rotation = ROTATE_0; /* default so rotation is never left uninitialized */
        }
6416         rotation_info.streamId = snapshotStreamId;
6417         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, sizeof(orientation), &orientation);
6418         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_ROTATION, sizeof(rotation_info), &rotation_info);
6419     }
6420 
6421     if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
6422         int8_t quality =
6423             frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
6424         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_QUALITY, sizeof(quality), &quality);
6425     }
6426 
6427     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
6428         int8_t thumb_quality =
6429             frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
6430         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY, sizeof(thumb_quality), &thumb_quality);
6431     }
6432 
6433     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
6434         cam_dimension_t dim;
6435         dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
6436         dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
6437         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, sizeof(dim), &dim);
6438     }
6439 
6440     // Internal metadata
6441     if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
6442         int32_t* privatedata =
6443                 frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS).data.i32;
6444         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
6445                 sizeof(int32_t) * MAX_METADATA_PRIVATE_PAYLOAD_SIZE, privatedata);
6446     }
6447 
6448     // EV step
6449     rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_EV_STEP,
6450             sizeof(cam_rational_type_t), &(gCamCapability[mCameraId]->exp_compensation_step));
6451 
6452     return rc;
6453 }
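
/* The translation above repeats one pattern throughout: read the framework tag,
 * clamp it against the limits advertised in gCamCapability, then batch it with
 * AddSetParmEntryToBatch(). A hypothetical helper sketch of the clamp step
 * (clampToRange is illustrative only and not part of this HAL):
 *
 *   template <typename T>
 *   static T clampToRange(T value, T min_value, T max_value) {
 *       if (value < min_value) return min_value;   // raise to the supported minimum
 *       if (value > max_value) return max_value;   // cap at the supported maximum
 *       return value;
 *   }
 *
 *   // e.g. the sensitivity handling could then be written as:
 *   //   sensorSensitivity = clampToRange(sensorSensitivity,
 *   //           gCamCapability[mCameraId]->sensitivity_range.min_sensitivity,
 *   //           gCamCapability[mCameraId]->sensitivity_range.max_sensitivity);
 */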
6454 
6455 /*===========================================================================
6456  * FUNCTION   : captureResultCb
6457  *
6458  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
6459  *
6460  * PARAMETERS :
6461  *   @frame  : frame information from mm-camera-interface
6462  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
6463  *   @userdata: userdata
6464  *
6465  * RETURN     : NONE
6466  *==========================================================================*/
6467 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
6468                 camera3_stream_buffer_t *buffer,
6469                 uint32_t frame_number, void *userdata)
6470 {
6471     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
6472     if (hw == NULL) {
6473         ALOGE("%s: Invalid hw %p", __func__, hw);
6474         return;
6475     }
6476 
6477     hw->captureResultCb(metadata, buffer, frame_number);
6478     return;
6479 }
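
/* The static captureResultCb() above is a plain trampoline: when channels are
 * created they are handed the QCamera3HardwareInterface instance as an opaque
 * userdata pointer, and the wrapper recovers that instance and forwards the call
 * to the member captureResultCb(). Minimal sketch of the pattern (constructor
 * arguments abbreviated for illustration; see the channel creation code for the
 * exact signatures):
 *
 *   // 'this' becomes the callback userdata at channel creation time
 *   new QCamera3RegularChannel(... , captureResultCb, ... , this);
 *
 *   // when a frame or metadata buffer is ready, the channel invokes the static
 *   // wrapper, which effectively does:
 *   //   ((QCamera3HardwareInterface *)userdata)->captureResultCb(metadata, buffer, frame_number);
 */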
6480 
6481 
6482 /*===========================================================================
6483  * FUNCTION   : initialize
6484  *
6485  * DESCRIPTION: Pass framework callback pointers to HAL
6486  *
6487  * PARAMETERS :
6488  *   @device       : camera3 device handle
6489  *   @callback_ops : framework callback function table
6490  * RETURN     : Success : 0
6491  *              Failure: -ENODEV
6492  *==========================================================================*/
6493 
6494 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
6495                                   const camera3_callback_ops_t *callback_ops)
6496 {
6497     CDBG("%s: E", __func__);
6498     QCamera3HardwareInterface *hw =
6499         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
6500     if (!hw) {
6501         ALOGE("%s: NULL camera device", __func__);
6502         return -ENODEV;
6503     }
6504 
6505     int rc = hw->initialize(callback_ops);
6506     CDBG("%s: X", __func__);
6507     return rc;
6508 }
6509 
6510 /*===========================================================================
6511  * FUNCTION   : configure_streams
6512  *
6513  * DESCRIPTION: Reset the HAL camera processing pipeline and set up the new
6514  *              set of input and output streams
6515  * PARAMETERS :
6516  *   @device      : camera3 device handle
6517  *   @stream_list : streams to be configured
6518  * RETURN     : Success: 0
6519  *              Failure: -EINVAL (if stream configuration is invalid)
6520  *                       -ENODEV (fatal error)
6521  *==========================================================================*/
6522 
6523 int QCamera3HardwareInterface::configure_streams(
6524         const struct camera3_device *device,
6525         camera3_stream_configuration_t *stream_list)
6526 {
6527     CDBG("%s: E", __func__);
6528     QCamera3HardwareInterface *hw =
6529         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
6530     if (!hw) {
6531         ALOGE("%s: NULL camera device", __func__);
6532         return -ENODEV;
6533     }
6534     int rc = hw->configureStreams(stream_list);
6535     CDBG("%s: X", __func__);
6536     return rc;
6537 }
6538 
6539 /*===========================================================================
6540  * FUNCTION   : construct_default_request_settings
6541  *
6542  * DESCRIPTION: Configure a settings buffer to meet the required use case
6543  *
6544  * PARAMETERS :
6545  *   @device : camera3 device handle
6546  *   @type   : CAMERA3_TEMPLATE_* use case for which defaults are requested
6547  * RETURN     : Success: Return valid metadata
6548  *              Failure: Return NULL
6549  *==========================================================================*/
6550 const camera_metadata_t* QCamera3HardwareInterface::
6551     construct_default_request_settings(const struct camera3_device *device,
6552                                         int type)
6553 {
6554 
6555     CDBG("%s: E", __func__);
6556     camera_metadata_t* fwk_metadata = NULL;
6557     QCamera3HardwareInterface *hw =
6558         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
6559     if (!hw) {
6560         ALOGE("%s: NULL camera device", __func__);
6561         return NULL;
6562     }
6563 
6564     fwk_metadata = hw->translateCapabilityToMetadata(type);
6565 
6566     CDBG("%s: X", __func__);
6567     return fwk_metadata;
6568 }
6569 
6570 /*===========================================================================
6571  * FUNCTION   : process_capture_request
6572  *
6573  * DESCRIPTION: Queue a new capture request to the HAL for processing
6574  *
6575  * PARAMETERS :
6576  *   @device  : camera3 device handle
6577  *   @request : capture request to be processed
6578  * RETURN     : Success: 0; Failure: -EINVAL (NULL device or invalid request)
6579  *==========================================================================*/
6580 int QCamera3HardwareInterface::process_capture_request(
6581                     const struct camera3_device *device,
6582                     camera3_capture_request_t *request)
6583 {
6584     CDBG("%s: E", __func__);
6585     QCamera3HardwareInterface *hw =
6586         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
6587     if (!hw) {
6588         ALOGE("%s: NULL camera device", __func__);
6589         return -EINVAL;
6590     }
6591 
6592     int rc = hw->processCaptureRequest(request);
6593     CDBG("%s: X", __func__);
6594     return rc;
6595 }
6596 
6597 /*===========================================================================
6598  * FUNCTION   : dump
6599  *
6600  * DESCRIPTION: Dump HAL state and debug information to the given file descriptor
6601  *
6602  * PARAMETERS :
6603  *   @device : camera3 device handle
6604  *   @fd     : file descriptor to dump into
6605  * RETURN     : None
6606  *==========================================================================*/
6607 
6608 void QCamera3HardwareInterface::dump(
6609                 const struct camera3_device *device, int fd)
6610 {
6611     /* Log level property is read when "adb shell dumpsys media.camera" is
6612        called so that the log level can be controlled without restarting
6613        the media server */
6614     getLogLevel();
6615 
6616     CDBG("%s: E", __func__);
6617     QCamera3HardwareInterface *hw =
6618         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
6619     if (!hw) {
6620         ALOGE("%s: NULL camera device", __func__);
6621         return;
6622     }
6623 
6624     hw->dump(fd);
6625     CDBG("%s: X", __func__);
6626     return;
6627 }
6628 
6629 /*===========================================================================
6630  * FUNCTION   : flush
6631  *
6632  * DESCRIPTION: Flush all in-flight captures and return their buffers to the
6633  *              framework
6634  * PARAMETERS :
6635  *   @device : camera3 device handle
6636  *
6637  * RETURN     : Success: 0; Failure: -EINVAL (NULL device)
6638  *==========================================================================*/
6639 
6640 int QCamera3HardwareInterface::flush(
6641                 const struct camera3_device *device)
6642 {
6643     int rc;
6644     CDBG("%s: E", __func__);
6645     QCamera3HardwareInterface *hw =
6646         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
6647     if (!hw) {
6648         ALOGE("%s: NULL camera device", __func__);
6649         return -EINVAL;
6650     }
6651 
6652     rc = hw->flush();
6653     CDBG("%s: X", __func__);
6654     return rc;
6655 }
6656 
6657 /*===========================================================================
6658  * FUNCTION   : close_camera_device
6659  * DESCRIPTION: Close the camera device and free the HAL instance
6660  *
6661  * PARAMETERS :
6662  *   @device : hw device handle of the camera to be closed
6663  *
6664  * RETURN     : Success: NO_ERROR
6665  *              Failure: BAD_VALUE (NULL device)
6666  *==========================================================================*/
6667 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
6668 {
6669     CDBG("%s: E", __func__);
6670     int ret = NO_ERROR;
6671     QCamera3HardwareInterface *hw =
6672         reinterpret_cast<QCamera3HardwareInterface *>(
6673             reinterpret_cast<camera3_device_t *>(device)->priv);
6674     if (!hw) {
6675         ALOGE("NULL camera device");
6676         return BAD_VALUE;
6677     }
6678     delete hw;
6679 
6680     CDBG("%s: X", __func__);
6681     return ret;
6682 }
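
/* The static functions above are the entry points the camera framework reaches
 * through the camera3_device_ops table; each one recovers the HAL instance from
 * device->priv and forwards to the corresponding member function. A minimal
 * sketch of how such an ops table is populated (field names follow camera3.h;
 * the HAL's own table is defined elsewhere in this file):
 *
 *   camera3_device_ops_t ops = {
 *       .initialize                         = QCamera3HardwareInterface::initialize,
 *       .configure_streams                  = QCamera3HardwareInterface::configure_streams,
 *       .construct_default_request_settings =
 *               QCamera3HardwareInterface::construct_default_request_settings,
 *       .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
 *       .dump                               = QCamera3HardwareInterface::dump,
 *       .flush                              = QCamera3HardwareInterface::flush,
 *   };
 *
 * close_camera_device() is not part of camera3_device_ops; it is wired into the
 * hw_device_t::close hook instead.
 */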
6683 
6684 /*===========================================================================
6685  * FUNCTION   : getWaveletDenoiseProcessPlate
6686  *
6687  * DESCRIPTION: query wavelet denoise process plate
6688  *
6689  * PARAMETERS : None
6690  *
6691  * RETURN     : WNR process plate value
6692  *==========================================================================*/
6693 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
6694 {
6695     char prop[PROPERTY_VALUE_MAX];
6696     memset(prop, 0, sizeof(prop));
6697     property_get("persist.denoise.process.plates", prop, "0");
6698     int processPlate = atoi(prop);
6699     switch(processPlate) {
6700     case 0:
6701         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
6702     case 1:
6703         return CAM_WAVELET_DENOISE_CBCR_ONLY;
6704     case 2:
6705         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
6706     case 3:
6707         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
6708     default:
6709         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
6710     }
6711 }
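
/* Plate selection is driven entirely by the persist.denoise.process.plates
 * property; for example, assuming adb access to a debuggable build:
 *
 *   adb shell setprop persist.denoise.process.plates 1   # CAM_WAVELET_DENOISE_CBCR_ONLY
 *
 * When the property is unset it defaults to "0" (CAM_WAVELET_DENOISE_YCBCR_PLANE),
 * and unrecognized values fall back to CAM_WAVELET_DENOISE_STREAMLINE_YCBCR. */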
6712 
6713 /*===========================================================================
6714  * FUNCTION   : needRotationReprocess
6715  *
6716  * DESCRIPTION: if rotation needs to be done by reprocess in pp
6717  *
6718  * PARAMETERS : none
6719  *
6720  * RETURN     : true: needed
6721  *              false: no need
6722  *==========================================================================*/
6723 bool QCamera3HardwareInterface::needRotationReprocess()
6724 {
6725     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
6726         // pp has the capability to process rotation; route rotation through reprocess
6727         CDBG_HIGH("%s: need do reprocess for rotation", __func__);
6728         return true;
6729     }
6730 
6731     return false;
6732 }
6733 
6734 /*===========================================================================
6735  * FUNCTION   : needReprocess
6736  *
6737  * DESCRIPTION: if reprocess is needed
6738  *
6739  * PARAMETERS : none
6740  *
6741  * RETURN     : true: needed
6742  *              false: no need
6743  *==========================================================================*/
6744 bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
6745 {
6746     if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
6747         // TODO: add for ZSL HDR later
6748         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
6749         if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
6750             CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
6751             return true;
6752         } else {
6753             CDBG_HIGH("%s: already post processed frame", __func__);
6754             return false;
6755         }
6756     }
6757     return needRotationReprocess();
6758 }
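
/* Summary of the decision above: when the sensor advertises a minimum required
 * post-processing mask, frames that have had no PP feature applied yet
 * (postprocess_mask == CAM_QCOM_FEATURE_NONE) must be reprocessed, while frames
 * that were already post-processed are not; otherwise the answer reduces to
 * whether rotation has to be handled by the reprocess path. */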
6759 
6760 /*===========================================================================
6761  * FUNCTION   : needJpegRotation
6762  *
6763  * DESCRIPTION: if rotation needs to be done by the JPEG encoder
6764  *
6765  * PARAMETERS : none
6766  *
6767  * RETURN     : true: needed
6768  *              false: no need
6769  *==========================================================================*/
6770 bool QCamera3HardwareInterface::needJpegRotation()
6771 {
6772    /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
6773     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
6774        CDBG("%s: Need Jpeg to do the rotation", __func__);
6775        return true;
6776     }
6777     return false;
6778 }
6779 
6780 /*===========================================================================
6781  * FUNCTION   : addOfflineReprocChannel
6782  *
6783  * DESCRIPTION: add a reprocess channel that will do reprocess on frames
6784  *              coming from input channel
6785  *
6786  * PARAMETERS :
6787  *   @config  : reprocess configuration
6788  *
6789  *
6790  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
6791  *==========================================================================*/
6792 QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
6793         const reprocess_config_t &config, QCamera3PicChannel *picChHandle,
6794         metadata_buffer_t *metadata)
6795 {
6796     int32_t rc = NO_ERROR;
6797     QCamera3ReprocessChannel *pChannel = NULL;
6798 
6799     pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
6800             mCameraHandle->ops, NULL, config.padding, CAM_QCOM_FEATURE_NONE, this, picChHandle);
6801     if (NULL == pChannel) {
6802         ALOGE("%s: no mem for reprocess channel", __func__);
6803         return NULL;
6804     }
6805 
6806     rc = pChannel->initialize(IS_TYPE_NONE, mCaptureIntent);
6807     if (rc != NO_ERROR) {
6808         ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
6809         delete pChannel;
6810         return NULL;
6811     }
6812 
6813     // pp feature config
6814     cam_pp_feature_config_t pp_config;
6815     memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
6816 
6817     pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET;
6818 
6819     rc = pChannel->addReprocStreamsFromSource(pp_config,
6820             config,
6821             IS_TYPE_NONE,
6822             mMetadataChannel);
6823 
6824     if (rc != NO_ERROR) {
6825         delete pChannel;
6826         return NULL;
6827     }
6828     return pChannel;
6829 }
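
/* Lifecycle note: the reprocess channel is created, initialized with the current
 * capture intent, given a PP feature config (superset mask), and then populated
 * with streams derived from the source configuration; any failure along the way
 * deletes the channel and returns NULL so the caller can fail the reprocess
 * request cleanly. */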
6830 
6831 
6832 bool QCamera3HardwareInterface::isCACEnabled() {
6833     char prop[PROPERTY_VALUE_MAX];
6834     memset(prop, 0, sizeof(prop));
6835     property_get("persist.camera.feature.cac", prop, "0");
6836     int enableCAC = atoi(prop);
6837     return enableCAC;
6838 }
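
/* CAC (chromatic aberration correction) is gated by a debug property; any
 * non-zero value enables it and the default is disabled. For example, assuming
 * adb access to a debuggable build:
 *
 *   adb shell setprop persist.camera.feature.cac 1
 */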
6839 /*===========================================================================
6840 * FUNCTION   : getLogLevel
6841 *
6842 * DESCRIPTION: Reads the log level property into a variable
6843 *
6844 * PARAMETERS :
6845 *   None
6846 *
6847 * RETURN     :
6848 *   None
6849 *==========================================================================*/
6850 void QCamera3HardwareInterface::getLogLevel()
6851 {
6852     char prop[PROPERTY_VALUE_MAX];
6853 
6854     property_get("persist.camera.logs", prop, "0");
6855     gCamHal3LogLevel = atoi(prop);
6856 
6857     return;
6858 }
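
/* Because dump() rereads this property, HAL log verbosity can be raised at
 * runtime without restarting the media server; for example, assuming adb access:
 *
 *   adb shell setprop persist.camera.logs 2
 *   adb shell dumpsys media.camera      # triggers dump(), which calls getLogLevel()
 */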
6859 
6860 }; //end namespace qcamera
6861