1 /* Copyright (c) 2012-2015, The Linux Foundataion. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 * * Redistributions of source code must retain the above copyright
7 * notice, this list of conditions and the following disclaimer.
8 * * Redistributions in binary form must reproduce the above
9 * copyright notice, this list of conditions and the following
10 * disclaimer in the documentation and/or other materials provided
11 * with the distribution.
12 * * Neither the name of The Linux Foundation nor the names of its
13 * contributors may be used to endorse or promote products derived
14 * from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29
30 #define ATRACE_TAG ATRACE_TAG_CAMERA
31 #define LOG_TAG "QCamera3HWI"
32 //#define LOG_NDEBUG 0
33
34 #define __STDC_LIMIT_MACROS
35 #include <cutils/properties.h>
36 #include <hardware/camera3.h>
37 #include <camera/CameraMetadata.h>
38 #include <stdio.h>
39 #include <stdlib.h>
40 #include <fcntl.h>
41 #include <stdint.h>
42 #include <utils/Log.h>
43 #include <utils/Errors.h>
44 #include <utils/Trace.h>
45 #include <sync/sync.h>
46 #include <gralloc_priv.h>
47 #include "util/QCameraFlash.h"
48 #include "QCamera3HWI.h"
49 #include "QCamera3Mem.h"
50 #include "QCamera3Channel.h"
51 #include "QCamera3PostProc.h"
52 #include "QCamera3VendorTags.h"
53
54 using namespace android;
55
56 namespace qcamera {
57
// Convenience accessor: fetch the buffer pointer at INDEX from a QCamera3
// memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Request pipeline pacing constants.
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0
#define CAM_MAX_SYNC_LATENCY 4

// 4K UHD dimensions; used to classify streams as 4K video / oversized JPEG.
#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

// Per-configuration stream count limits enforced in configureStreams().
#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
// Wait indefinitely (sentinel timeout value).
#define TIMEOUT_NEVER -1

// Per-sensor capability and static-metadata caches, indexed by camera id.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Global HAL debug verbosity. NOTE(review): presumably updated by
// getLogLevel(), which the constructor calls — confirm.
volatile uint32_t gCamHal3LogLevel = 1;
76
// Maps CDS (Chroma Denoise/Suppression) property strings to HAL CDS modes.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
82
// Maps ANDROID_CONTROL_EFFECT_MODE_* framework enums to HAL effect modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
94
// Maps ANDROID_CONTROL_AWB_MODE_* framework enums to HAL white-balance modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
106
// Maps ANDROID_CONTROL_SCENE_MODE_* framework enums to HAL scene modes.
// Note STEADYPHOTO maps to the HAL's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
125
// Maps ANDROID_CONTROL_AF_MODE_* framework enums to HAL focus modes.
// AF_MODE_OFF appears twice so both HAL OFF and FIXED translate back to the
// single Android OFF enum when mapped in the HAL-to-Android direction.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
135
// Maps ANDROID_COLOR_CORRECTION_ABERRATION_MODE_* enums to HAL chromatic
// aberration correction modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};
144
// Maps ANDROID_CONTROL_AE_ANTIBANDING_MODE_* enums to HAL antibanding modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
151
// Maps ANDROID_CONTROL_AE_MODE_* enums to the HAL flash mode the AE mode
// implies.  Plain AE ON (no flash variant) maps to flash OFF; both AUTO_FLASH
// variants map to flash AUTO.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
159
// Maps ANDROID_FLASH_MODE_* enums to HAL flash modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
165
// Maps ANDROID_STATISTICS_FACE_DETECT_MODE_* enums to HAL face-detect modes.
// Only OFF and FULL are mapped (no SIMPLE entry here).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,  CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};
170
// Maps ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_* enums to the HAL lens
// focus-distance calibration quality levels.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};
179
// Supported JPEG thumbnail sizes as flat (width, height) pairs; the leading
// (0, 0) pair means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};
187
// Maps ANDROID_SENSOR_TEST_PATTERN_MODE_* enums to HAL sensor test patterns.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,                     CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,             CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,              CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,                     CAM_TEST_PATTERN_PN9 },
};
195
196 /* Since there is no mapping for all the options some Android enum are not listed.
197 * Also, the order in this list is important because while mapping from HAL to Android it will
198 * traverse from lower to higher index which means that for HAL values that are map to different
199 * Android values, the traverse logic will select the first one found.
200 */
// Maps ANDROID_SENSOR_REFERENCE_ILLUMINANT1_* enums to HAL AWB illuminants.
// Several HAL values are reachable from multiple Android enums; entry ORDER
// matters because HAL-to-Android translation takes the first match found.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};
219
// camera3 device entry points exported to the camera framework.
// register_stream_buffers and get_metadata_vendor_tag_ops are left NULL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            NULL,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        NULL,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
231
232 /*===========================================================================
233 * FUNCTION : QCamera3HardwareInterface
234 *
235 * DESCRIPTION: constructor of QCamera3HardwareInterface
236 *
237 * PARAMETERS :
238 * @cameraId : camera ID
239 *
240 * RETURN : none
241 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mRawDumpChannel(NULL),
      mFirstRequest(false),
      mFlush(false),
      mParamHeap(NULL),
      mParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      mEisEnable(0),
      mLoopBackResult(NULL),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL),
      mMetaFrameCount(0),
      mCallbacks(callbacks),
      mCaptureIntent(0)
{
    getLogLevel();
    // Fill in the framework-visible camera3_device_t wrapper.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    // Request bookkeeping shared between the framework thread and the
    // daemon event callback; protected by mMutex / signaled via mRequestCond.
    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power HAL handle used for encode power hints; failure is non-fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif

    // Optional raw-frame dumping, enabled via the persist.camera.raw.dump
    // system property.
    char prop[PROPERTY_VALUE_MAX];
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        CDBG("%s: Raw dump from Camera HAL enabled", __func__);
}
301
302 /*===========================================================================
303 * FUNCTION : ~QCamera3HardwareInterface
304 *
305 * DESCRIPTION: destructor of QCamera3HardwareInterface
306 *
307 * PARAMETERS : none
308 *
309 * RETURN : none
310 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    CDBG("%s: E", __func__);
    /* We need to stop all streams before deleting any stream */

    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    // First pass: stop every channel before any channel is deleted.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    // Second pass: delete channels and free the stream_info records.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    // mPictureChannel is owned via mStreamInfo (deleted above); just clear
    // the alias pointer here.
    mPictureChannel = NULL;

    /* Clean up all channels */
    if (mCameraInitialized) {
        if (mMetadataChannel) {
            mMetadataChannel->stop();
            delete mMetadataChannel;
            mMetadataChannel = NULL;
        }
        deinitParameters();
    }

    // closeCamera() also releases the flash unit reserved in openCamera().
    if (mCameraOpened)
        closeCamera();

    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingRequestsList.clear();
    mPendingReprocessResultList.clear();

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    CDBG("%s: X", __func__);
}
377
378 /*===========================================================================
379 * FUNCTION : camEvtHandle
380 *
381 * DESCRIPTION: Function registered to mm-camera-interface to handle events
382 *
383 * PARAMETERS :
384 * @camera_handle : interface layer camera handle
385 * @evt : ptr to event
386 * @user_data : user data ptr
387 *
388 * RETURN : none
389 *==========================================================================*/
camEvtHandle(uint32_t,mm_camera_event_t * evt,void * user_data)390 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
391 mm_camera_event_t *evt,
392 void *user_data)
393 {
394 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
395 camera3_notify_msg_t notify_msg;
396 if (obj && evt) {
397 switch(evt->server_event_type) {
398 case CAM_EVENT_TYPE_DAEMON_DIED:
399 ALOGE("%s: Fatal, camera daemon died", __func__);
400
401 //close the camera backend
402 if (obj->mCameraHandle && obj->mCameraHandle->camera_handle
403 && obj->mCameraHandle->ops) {
404 obj->mCameraHandle->ops->error_close_camera(obj->mCameraHandle->camera_handle);
405 } else {
406 ALOGE("%s: Could not close camera on error because the handle or ops is NULL",
407 __func__);
408 }
409 memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t));
410 notify_msg.type = CAMERA3_MSG_ERROR;
411 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
412 notify_msg.message.error.error_stream = NULL;
413 notify_msg.message.error.frame_number = 0;
414 obj->mCallbackOps->notify(obj->mCallbackOps, ¬ify_msg);
415 break;
416
417 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
418 CDBG("%s: HAL got request pull from Daemon", __func__);
419 pthread_mutex_lock(&obj->mMutex);
420 obj->mWokenUpByDaemon = true;
421 obj->unblockRequestIfNecessary();
422 pthread_mutex_unlock(&obj->mMutex);
423 break;
424
425 default:
426 CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
427 evt->server_event_type);
428 break;
429 }
430 } else {
431 ALOGE("%s: NULL user_data/evt", __func__);
432 }
433 }
434
435 /*===========================================================================
436 * FUNCTION : openCamera
437 *
438 * DESCRIPTION: open camera
439 *
440 * PARAMETERS :
441 * @hw_device : double ptr for camera device struct
442 *
443 * RETURN : int32_t type of status
444 * NO_ERROR -- success
445 * none-zero failure code
446 *==========================================================================*/
openCamera(struct hw_device_t ** hw_device)447 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
448 {
449 int rc = 0;
450 if (mCameraOpened) {
451 *hw_device = NULL;
452 return PERMISSION_DENIED;
453 }
454
455 rc = openCamera();
456 if (rc == 0) {
457 *hw_device = &mCameraDevice.common;
458 } else
459 *hw_device = NULL;
460
461 #ifdef HAS_MULTIMEDIA_HINTS
462 if (rc == 0) {
463 if (m_pPowerModule) {
464 if (m_pPowerModule->powerHint) {
465 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
466 (void *)"state=1");
467 }
468 }
469 }
470 #endif
471 return rc;
472 }
473
474 /*===========================================================================
475 * FUNCTION : openCamera
476 *
477 * DESCRIPTION: open camera
478 *
479 * PARAMETERS : none
480 *
481 * RETURN : int32_t type of status
482 * NO_ERROR -- success
483 * none-zero failure code
484 *==========================================================================*/
openCamera()485 int QCamera3HardwareInterface::openCamera()
486 {
487 int rc = 0;
488
489 ATRACE_CALL();
490 if (mCameraHandle) {
491 ALOGE("Failure: Camera already opened");
492 return ALREADY_EXISTS;
493 }
494
495 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
496 if (rc < 0) {
497 ALOGE("%s: Failed to reserve flash for camera id: %d",
498 __func__,
499 mCameraId);
500 return UNKNOWN_ERROR;
501 }
502
503 mCameraHandle = camera_open(mCameraId);
504 if (!mCameraHandle) {
505 ALOGE("camera_open failed.");
506 return UNKNOWN_ERROR;
507 }
508
509 mCameraOpened = true;
510
511 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
512 camEvtHandle, (void *)this);
513
514 if (rc < 0) {
515 ALOGE("%s: Error, failed to register event callback", __func__);
516 /* Not closing camera here since it is already handled in destructor */
517 return FAILED_TRANSACTION;
518 }
519
520 return NO_ERROR;
521 }
522
523 /*===========================================================================
524 * FUNCTION : closeCamera
525 *
526 * DESCRIPTION: close camera
527 *
528 * PARAMETERS : none
529 *
530 * RETURN : int32_t type of status
531 * NO_ERROR -- success
532 * none-zero failure code
533 *==========================================================================*/
closeCamera()534 int QCamera3HardwareInterface::closeCamera()
535 {
536 ATRACE_CALL();
537 int rc = NO_ERROR;
538
539 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
540 mCameraHandle = NULL;
541 mCameraOpened = false;
542
543 #ifdef HAS_MULTIMEDIA_HINTS
544 if (rc == NO_ERROR) {
545 if (m_pPowerModule) {
546 if (m_pPowerModule->powerHint) {
547 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
548 (void *)"state=0");
549 }
550 }
551 }
552 #endif
553
554 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
555 CDBG("%s: Failed to release flash for camera id: %d",
556 __func__,
557 mCameraId);
558 }
559
560 return rc;
561 }
562
563 /*===========================================================================
564 * FUNCTION : initialize
565 *
566 * DESCRIPTION: Initialize frameworks callback functions
567 *
568 * PARAMETERS :
569 * @callback_ops : callback function to frameworks
570 *
571 * RETURN :
572 *
573 *==========================================================================*/
initialize(const struct camera3_callback_ops * callback_ops)574 int QCamera3HardwareInterface::initialize(
575 const struct camera3_callback_ops *callback_ops)
576 {
577 ATRACE_CALL();
578 int rc;
579
580 pthread_mutex_lock(&mMutex);
581
582 rc = initParameters();
583 if (rc < 0) {
584 ALOGE("%s: initParamters failed %d", __func__, rc);
585 goto err1;
586 }
587 mCallbackOps = callback_ops;
588
589 pthread_mutex_unlock(&mMutex);
590 mCameraInitialized = true;
591 return 0;
592
593 err1:
594 pthread_mutex_unlock(&mMutex);
595 return rc;
596 }
597
598 /*===========================================================================
599 * FUNCTION : validateStreamDimensions
600 *
601 * DESCRIPTION: Check if the configuration requested are those advertised
602 *
603 * PARAMETERS :
604 * @stream_list : streams to be configured
605 *
606 * RETURN :
607 *
608 *==========================================================================*/
validateStreamDimensions(camera3_stream_configuration_t * streamList)609 int QCamera3HardwareInterface::validateStreamDimensions(
610 camera3_stream_configuration_t *streamList)
611 {
612 int rc = NO_ERROR;
613 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
614 int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
615 uint8_t jpeg_sizes_cnt = 0;
616
617 camera3_stream_t *inputStream = NULL;
618 /*
619 * Loop through all streams to find input stream if it exists*
620 */
621 for (size_t i = 0; i< streamList->num_streams; i++) {
622 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
623 if (inputStream != NULL) {
624 ALOGE("%s: Error, Multiple input streams requested", __func__);
625 return -EINVAL;
626 }
627 inputStream = streamList->streams[i];
628 }
629 }
630 /*
631 * Loop through all streams requested in configuration
632 * Check if unsupported sizes have been requested on any of them
633 */
634 for (size_t j = 0; j < streamList->num_streams; j++){
635 bool sizeFound = false;
636 camera3_stream_t *newStream = streamList->streams[j];
637
638 /*
639 * Sizes are different for each type of stream format check against
640 * appropriate table.
641 */
642 switch (newStream->format) {
643 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
644 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
645 case HAL_PIXEL_FORMAT_RAW10:
646 for (int i = 0;
647 i < gCamCapability[mCameraId]->supported_raw_dim_cnt; i++){
648 if (gCamCapability[mCameraId]->raw_dim[i].width
649 == (int32_t) newStream->width
650 && gCamCapability[mCameraId]->raw_dim[i].height
651 == (int32_t) newStream->height) {
652 sizeFound = true;
653 break;
654 }
655 }
656 break;
657 case HAL_PIXEL_FORMAT_BLOB:
658 /* Generate JPEG sizes table */
659 makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
660 gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
661 available_processed_sizes);
662 jpeg_sizes_cnt = filterJpegSizes(
663 available_jpeg_sizes,
664 available_processed_sizes,
665 (gCamCapability[mCameraId]->picture_sizes_tbl_cnt) * 2,
666 MAX_SIZES_CNT * 2,
667 gCamCapability[mCameraId]->active_array_size,
668 gCamCapability[mCameraId]->max_downscale_factor);
669
670 /* Verify set size against generated sizes table */
671 for (int i = 0;i < jpeg_sizes_cnt/2; i++) {
672 if ((int32_t)(newStream->width) == available_jpeg_sizes[i*2] &&
673 (int32_t)(newStream->height) == available_jpeg_sizes[i*2+1]) {
674 sizeFound = true;
675 break;
676 }
677 }
678 break;
679
680
681 case HAL_PIXEL_FORMAT_YCbCr_420_888:
682 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
683 default:
684 /* ZSL stream will be full active array size validate that*/
685 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
686 || newStream->stream_type == CAMERA3_STREAM_INPUT
687 || newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL){
688 if ((int32_t)(newStream->width) ==
689 gCamCapability[mCameraId]->active_array_size.width
690 && (int32_t)(newStream->height) ==
691 gCamCapability[mCameraId]->active_array_size.height) {
692 sizeFound = true;
693 }
694 /* We could potentially break here to enforce ZSL stream
695 * set from frameworks always has full active array size
696 * but it is not clear from spec if framework will always
697 * follow that, also we have logic to override to full array
698 * size, so keeping this logic lenient at the moment.
699 */
700 }
701
702 /* Non ZSL stream still need to conform to advertised sizes*/
703 for (int i = 0;
704 i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt;i++){
705 if ((int32_t)(newStream->width) ==
706 gCamCapability[mCameraId]->picture_sizes_tbl[i].width
707 && (int32_t)(newStream->height) ==
708 gCamCapability[mCameraId]->picture_sizes_tbl[i].height){
709 sizeFound = true;
710 break;
711 }
712 }
713 break;
714 } /* End of switch(newStream->format) */
715
716 /* We error out even if a single stream has unsupported size set */
717 if (!sizeFound) {
718 ALOGE("%s: Error: Unsupported size of %d x %d requested for stream"
719 "type:%d", __func__, newStream->width, newStream->height,
720 newStream->format);
721 ALOGE("%s: Active array size is %d x %d", __func__,
722 gCamCapability[mCameraId]->active_array_size.width,
723 gCamCapability[mCameraId]->active_array_size.height);
724 rc = -EINVAL;
725 break;
726 }
727 } /* End of for each stream */
728 return rc;
729 }
730
731 /*==============================================================================
732 * FUNCTION : isSupportChannelNeeded
733 *
734 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
735 *
736 * PARAMETERS :
737 * @stream_list : streams to be configured
738 *
739 * RETURN : Boolen true/false decision
740 *
741 *==========================================================================*/
isSupportChannelNeeded(camera3_stream_configuration_t * streamList,cam_stream_size_info_t stream_config_info)742 bool QCamera3HardwareInterface::isSupportChannelNeeded(camera3_stream_configuration_t *streamList,
743 cam_stream_size_info_t stream_config_info)
744 {
745 uint32_t i;
746 bool bSuperSetPresent = false;
747 /* Check for conditions where PProc pipeline does not have any streams*/
748 for (i = 0; i < stream_config_info.num_streams; i++) {
749 if (stream_config_info.postprocess_mask[i] == CAM_QCOM_FEATURE_PP_SUPERSET) {
750 bSuperSetPresent = true;
751 break;
752 }
753 }
754
755 if (bSuperSetPresent == false )
756 return true;
757
758 /* Dummy stream needed if only raw or jpeg streams present */
759 for (i = 0;i < streamList->num_streams;i++) {
760 switch(streamList->streams[i]->format) {
761 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
762 case HAL_PIXEL_FORMAT_RAW10:
763 case HAL_PIXEL_FORMAT_RAW16:
764 case HAL_PIXEL_FORMAT_BLOB:
765 break;
766 default:
767 return false;
768 }
769 }
770 return true;
771 }
772
773
774 /*===========================================================================
775 * FUNCTION : configureStreams
776 *
777 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
778 * and output streams.
779 *
780 * PARAMETERS :
781 * @stream_list : streams to be configured
782 *
783 * RETURN :
784 *
785 *==========================================================================*/
configureStreams(camera3_stream_configuration_t * streamList)786 int QCamera3HardwareInterface::configureStreams(
787 camera3_stream_configuration_t *streamList)
788 {
789 ATRACE_CALL();
790 int rc = 0;
791
792 // Sanity check stream_list
793 if (streamList == NULL) {
794 ALOGE("%s: NULL stream configuration", __func__);
795 return BAD_VALUE;
796 }
797 if (streamList->streams == NULL) {
798 ALOGE("%s: NULL stream list", __func__);
799 return BAD_VALUE;
800 }
801
802 if (streamList->num_streams < 1) {
803 ALOGE("%s: Bad number of streams requested: %d", __func__,
804 streamList->num_streams);
805 return BAD_VALUE;
806 }
807
808 /* first invalidate all the steams in the mStreamList
809 * if they appear again, they will be validated */
810 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
811 it != mStreamInfo.end(); it++) {
812 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
813 channel->stop();
814 (*it)->status = INVALID;
815 }
816
817 if (mRawDumpChannel) {
818 mRawDumpChannel->stop();
819 delete mRawDumpChannel;
820 mRawDumpChannel = NULL;
821 }
822
823 if (mSupportChannel)
824 mSupportChannel->stop();
825 if (mMetadataChannel) {
826 /* If content of mStreamInfo is not 0, there is metadata stream */
827 mMetadataChannel->stop();
828 }
829
830 pthread_mutex_lock(&mMutex);
831
832 /* Check whether we have video stream */
833 m_bIs4KVideo = false;
834 m_bIsVideo = false;
835 bool isZsl = false;
836 size_t videoWidth = 0;
837 size_t videoHeight = 0;
838 size_t rawStreamCnt = 0;
839 size_t stallStreamCnt = 0;
840 size_t processedStreamCnt = 0;
841 // Number of streams on ISP encoder path
842 size_t numStreamsOnEncoder = 0;
843 cam_dimension_t maxViewfinderSize;
844 bool bJpegExceeds4K = false;
845 bool bUseCommonFeatureMask = false;
846 uint32_t commonFeatureMask = 0;
847 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
848 camera3_stream_t *inputStream = NULL;
849
850 for (size_t i = 0; i < streamList->num_streams; i++) {
851 camera3_stream_t *newStream = streamList->streams[i];
852 CDBG_HIGH("%s: stream[%d] type = %d, format = %d, width = %d, height = %d",
853 __func__, i, newStream->stream_type, newStream->format,
854 newStream->width, newStream->height);
855
856 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
857 newStream->stream_type == CAMERA3_STREAM_INPUT){
858 isZsl = true;
859 }
860 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
861 inputStream = newStream;
862 }
863
864 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
865 if (newStream->width > VIDEO_4K_WIDTH ||
866 newStream->height > VIDEO_4K_HEIGHT)
867 bJpegExceeds4K = true;
868 }
869
870 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
871 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
872 m_bIsVideo = true;
873
874 if ((VIDEO_4K_WIDTH <= newStream->width) &&
875 (VIDEO_4K_HEIGHT <= newStream->height)) {
876 videoWidth = newStream->width;
877 videoHeight = newStream->height;
878 m_bIs4KVideo = true;
879 }
880 }
881 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
882 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
883 switch (newStream->format) {
884 case HAL_PIXEL_FORMAT_BLOB:
885 stallStreamCnt++;
886 if (newStream->width > (uint32_t)maxViewfinderSize.width ||
887 newStream->height > (uint32_t)maxViewfinderSize.height) {
888 commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
889 numStreamsOnEncoder++;
890 }
891 break;
892 case HAL_PIXEL_FORMAT_RAW10:
893 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
894 case HAL_PIXEL_FORMAT_RAW16:
895 rawStreamCnt++;
896 break;
897 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
898 processedStreamCnt++;
899 if (newStream->width > (uint32_t)maxViewfinderSize.width ||
900 newStream->height > (uint32_t)maxViewfinderSize.height) {
901 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
902 newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) {
903 commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
904 } else {
905 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET;
906 }
907 numStreamsOnEncoder++;
908 }
909 break;
910 case HAL_PIXEL_FORMAT_YCbCr_420_888:
911 default:
912 processedStreamCnt++;
913 if (newStream->width > (uint32_t)maxViewfinderSize.width ||
914 newStream->height > (uint32_t)maxViewfinderSize.height) {
915 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET;
916 numStreamsOnEncoder++;
917 }
918 break;
919 }
920
921 }
922 }
923
924 /* Check if num_streams is sane */
925 if (stallStreamCnt > MAX_STALLING_STREAMS ||
926 rawStreamCnt > MAX_RAW_STREAMS ||
927 processedStreamCnt > MAX_PROCESSED_STREAMS) {
928 ALOGE("%s: Invalid stream configu: stall: %d, raw: %d, processed %d",
929 __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
930 pthread_mutex_unlock(&mMutex);
931 return -EINVAL;
932 }
933 /* Check whether we have zsl stream or 4k video case */
934 if (isZsl && m_bIsVideo) {
935 ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
936 pthread_mutex_unlock(&mMutex);
937 return -EINVAL;
938 }
939 /* Check if stream sizes are sane */
940 if (numStreamsOnEncoder > 2) {
941 ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
942 __func__);
943 pthread_mutex_unlock(&mMutex);
944 return -EINVAL;
945 } else if (1 < numStreamsOnEncoder){
946 bUseCommonFeatureMask = true;
947 CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed",
948 __func__);
949 }
950 /* Check if BLOB size is greater than 4k in 4k recording case */
951 if (m_bIs4KVideo && bJpegExceeds4K) {
952 ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
953 __func__);
954 pthread_mutex_unlock(&mMutex);
955 return -EINVAL;
956 }
957
958 rc = validateStreamDimensions(streamList);
959 if (rc != NO_ERROR) {
960 ALOGE("%s: Invalid stream configuration requested!", __func__);
961 pthread_mutex_unlock(&mMutex);
962 return rc;
963 }
964
965 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
966 camera3_stream_t *jpegStream = NULL;
967 cam_stream_size_info_t stream_config_info;
968 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
969 for (size_t i = 0; i < streamList->num_streams; i++) {
970 camera3_stream_t *newStream = streamList->streams[i];
971 CDBG_HIGH("%s: newStream type = %d, stream format = %d stream size : %d x %d",
972 __func__, newStream->stream_type, newStream->format,
973 newStream->width, newStream->height);
974 //if the stream is in the mStreamList validate it
975 bool stream_exists = false;
976 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
977 it != mStreamInfo.end(); it++) {
978 if ((*it)->stream == newStream) {
979 QCamera3Channel *channel =
980 (QCamera3Channel*)(*it)->stream->priv;
981 stream_exists = true;
982 if (channel)
983 delete channel;
984 (*it)->status = VALID;
985 (*it)->stream->priv = NULL;
986 (*it)->channel = NULL;
987 }
988 }
989 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
990 //new stream
991 stream_info_t* stream_info;
992 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
993 stream_info->stream = newStream;
994 stream_info->status = VALID;
995 stream_info->channel = NULL;
996 mStreamInfo.push_back(stream_info);
997 }
998 /* Covers Opaque ZSL and API1 F/W ZSL */
999 if (newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL
1000 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1001 if (zslStream != NULL) {
1002 ALOGE("%s: Multiple input/reprocess streams requested!", __func__);
1003 pthread_mutex_unlock(&mMutex);
1004 return BAD_VALUE;
1005 }
1006 zslStream = newStream;
1007 }
1008 /* Covers YUV reprocess */
1009 if (inputStream != NULL) {
1010 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1011 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1012 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1013 && inputStream->width == newStream->width
1014 && inputStream->height == newStream->height) {
1015 if (zslStream != NULL) {
1016 /* This scenario indicates multiple YUV streams with same size
1017 * as input stream have been requested, since zsl stream handle
1018 * is solely use for the purpose of overriding the size of streams
1019 * which share h/w streams we will just make a guess here as to
1020 * which of the stream is a ZSL stream, this will be refactored
1021 * once we make generic logic for streams sharing encoder output
1022 */
1023 CDBG_HIGH("%s: Warning, Multiple ip/reprocess streams requested!", __func__);
1024 }
1025 zslStream = newStream;
1026 }
1027 }
1028 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1029 jpegStream = newStream;
1030 }
1031 }
1032
1033 cleanAndSortStreamInfo();
1034 if (mMetadataChannel) {
1035 delete mMetadataChannel;
1036 mMetadataChannel = NULL;
1037 }
1038 if (mSupportChannel) {
1039 delete mSupportChannel;
1040 mSupportChannel = NULL;
1041 }
1042
1043 //Create metadata channel and initialize it
1044 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1045 mCameraHandle->ops, captureResultCb,
1046 &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
1047 if (mMetadataChannel == NULL) {
1048 ALOGE("%s: failed to allocate metadata channel", __func__);
1049 rc = -ENOMEM;
1050 pthread_mutex_unlock(&mMutex);
1051 return rc;
1052 }
1053 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1054 if (rc < 0) {
1055 ALOGE("%s: metadata channel initialization failed", __func__);
1056 delete mMetadataChannel;
1057 mMetadataChannel = NULL;
1058 pthread_mutex_unlock(&mMutex);
1059 return rc;
1060 }
1061
1062 bool isRawStreamRequested = false;
1063 /* Allocate channel objects for the requested streams */
1064 for (size_t i = 0; i < streamList->num_streams; i++) {
1065 camera3_stream_t *newStream = streamList->streams[i];
1066 uint32_t stream_usage = newStream->usage;
1067 stream_config_info.stream_sizes[stream_config_info.num_streams].width = newStream->width;
1068 stream_config_info.stream_sizes[stream_config_info.num_streams].height = newStream->height;
1069 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1070 || newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL)
1071 && newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED
1072 && jpegStream){
1073 //for zsl stream the size is active array size
1074 // FIXME remove this for api zsl bidirectional is alway active array size
1075 // and for hal 3.3 reprocess, we will *indirectly* control using input size.
1076 // there is a grey area however when the application whimsically decides to create
1077 // a standalone zsl stream whose size < jpeg blob size
1078 /*
1079 stream_config_info.stream_sizes[stream_config_info.num_streams].width =
1080 gCamCapability[mCameraId]->active_array_size.width;
1081 stream_config_info.stream_sizes[stream_config_info.num_streams].height =
1082 gCamCapability[mCameraId]->active_array_size.height;
1083 */
1084 stream_config_info.type[stream_config_info.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1085 stream_config_info.postprocess_mask[stream_config_info.num_streams] = CAM_QCOM_FEATURE_NONE;
1086 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1087 CDBG_HIGH("%s: Input stream configured, reprocess config", __func__);
1088 } else {
1089 //for non zsl streams find out the format
1090 switch (newStream->format) {
1091 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1092 {
1093 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1094 stream_config_info.type[stream_config_info.num_streams] = CAM_STREAM_TYPE_VIDEO;
1095 } else {
1096 stream_config_info.type[stream_config_info.num_streams] = CAM_STREAM_TYPE_PREVIEW;
1097 }
1098 stream_config_info.postprocess_mask[stream_config_info.num_streams] = CAM_QCOM_FEATURE_PP_SUPERSET;
1099 }
1100 break;
1101 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1102 stream_config_info.type[stream_config_info.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1103 stream_config_info.postprocess_mask[stream_config_info.num_streams] = CAM_QCOM_FEATURE_PP_SUPERSET;
1104 break;
1105 case HAL_PIXEL_FORMAT_BLOB:
1106 stream_config_info.type[stream_config_info.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1107 if (m_bIs4KVideo && !isZsl) {
1108 stream_config_info.postprocess_mask[stream_config_info.num_streams] = CAM_QCOM_FEATURE_PP_SUPERSET;
1109 } else {
1110 if (bUseCommonFeatureMask &&
1111 (newStream->width > (uint32_t)maxViewfinderSize.width ||
1112 newStream->height > (uint32_t)maxViewfinderSize.height)) {
1113 stream_config_info.postprocess_mask[stream_config_info.num_streams] = commonFeatureMask;
1114 } else {
1115 stream_config_info.postprocess_mask[stream_config_info.num_streams] = CAM_QCOM_FEATURE_NONE;
1116 }
1117 }
1118 if (isZsl) {
1119 if (zslStream) {
1120 stream_config_info.stream_sizes[stream_config_info.num_streams].width =
1121 zslStream->width;
1122 stream_config_info.stream_sizes[stream_config_info.num_streams].height =
1123 zslStream->height;
1124 } else {
1125 ALOGE("%s: Error, No ZSL stream identified",__func__);
1126 pthread_mutex_unlock(&mMutex);
1127 return -EINVAL;
1128 }
1129 } else if (m_bIs4KVideo) {
1130 stream_config_info.stream_sizes[stream_config_info.num_streams].width = videoWidth;
1131 stream_config_info.stream_sizes[stream_config_info.num_streams].height = videoHeight;
1132 }
1133 break;
1134 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1135 case HAL_PIXEL_FORMAT_RAW16:
1136 case HAL_PIXEL_FORMAT_RAW10:
1137 stream_config_info.type[stream_config_info.num_streams] = CAM_STREAM_TYPE_RAW;
1138 isRawStreamRequested = true;
1139 break;
1140 default:
1141 stream_config_info.type[stream_config_info.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1142 stream_config_info.postprocess_mask[stream_config_info.num_streams] = CAM_QCOM_FEATURE_NONE;
1143 break;
1144 }
1145
1146 }
1147 if (newStream->priv == NULL) {
1148 //New stream, construct channel
1149 switch (newStream->stream_type) {
1150 case CAMERA3_STREAM_INPUT:
1151 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
1152 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1153 break;
1154 case CAMERA3_STREAM_BIDIRECTIONAL:
1155 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
1156 GRALLOC_USAGE_HW_CAMERA_WRITE;
1157 break;
1158 case CAMERA3_STREAM_OUTPUT:
1159 /* For video encoding stream, set read/write rarely
1160 * flag so that they may be set to un-cached */
1161 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1162 newStream->usage =
1163 (GRALLOC_USAGE_SW_READ_RARELY |
1164 GRALLOC_USAGE_SW_WRITE_RARELY |
1165 GRALLOC_USAGE_HW_CAMERA_WRITE);
1166 else if (newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL)
1167 CDBG("%s: ZSL usage flag skipping", __func__);
1168 else
1169 newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
1170 break;
1171 default:
1172 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
1173 break;
1174 }
1175
1176 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1177 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1178 QCamera3Channel *channel = NULL;
1179 switch (newStream->format) {
1180 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1181 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1182 newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
1183 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1184 mCameraHandle->ops, captureResultCb,
1185 &gCamCapability[mCameraId]->padding_info,
1186 this,
1187 newStream,
1188 (cam_stream_type_t) stream_config_info.type[stream_config_info.num_streams],
1189 stream_config_info.postprocess_mask[stream_config_info.num_streams]);
1190 if (channel == NULL) {
1191 ALOGE("%s: allocation of channel failed", __func__);
1192 pthread_mutex_unlock(&mMutex);
1193 return -ENOMEM;
1194 }
1195
1196 newStream->priv = channel;
1197 break;
1198 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1199 case HAL_PIXEL_FORMAT_RAW16:
1200 case HAL_PIXEL_FORMAT_RAW10:
1201 newStream->max_buffers = QCamera3RawChannel::kMaxBuffers;
1202 mRawChannel = new QCamera3RawChannel(
1203 mCameraHandle->camera_handle,
1204 mCameraHandle->ops, captureResultCb,
1205 &gCamCapability[mCameraId]->padding_info,
1206 this, newStream, CAM_QCOM_FEATURE_NONE,
1207 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
1208 if (mRawChannel == NULL) {
1209 ALOGE("%s: allocation of raw channel failed", __func__);
1210 pthread_mutex_unlock(&mMutex);
1211 return -ENOMEM;
1212 }
1213
1214 newStream->priv = (QCamera3Channel*)mRawChannel;
1215 break;
1216 case HAL_PIXEL_FORMAT_BLOB:
1217 newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
1218 mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
1219 mCameraHandle->ops, captureResultCb,
1220 &gCamCapability[mCameraId]->padding_info, this, newStream,
1221 stream_config_info.postprocess_mask[stream_config_info.num_streams],
1222 m_bIs4KVideo, mMetadataChannel);
1223 if (mPictureChannel == NULL) {
1224 ALOGE("%s: allocation of channel failed", __func__);
1225 pthread_mutex_unlock(&mMutex);
1226 return -ENOMEM;
1227 }
1228 newStream->priv = (QCamera3Channel*)mPictureChannel;
1229 break;
1230
1231 default:
1232 ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
1233 break;
1234 }
1235 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
1236 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
1237 } else {
1238 ALOGE("%s: Error, Unknown stream type", __func__);
1239 return -EINVAL;
1240 }
1241
1242 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1243 it != mStreamInfo.end(); it++) {
1244 if ((*it)->stream == newStream) {
1245 (*it)->channel = (QCamera3Channel*) newStream->priv;
1246 break;
1247 }
1248 }
1249 } else {
1250 // Channel already exists for this stream
1251 // Do nothing for now
1252 }
1253
1254 /* Do not add entries for input stream in metastream info
1255 * since there is no real stream associated with it
1256 */
1257 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
1258 stream_config_info.num_streams++;
1259 }
1260
1261 if (isZsl) {
1262 if (zslStream == NULL) {
1263 ALOGE("%s: Error Zsl stream handle missing", __func__);
1264 pthread_mutex_unlock(&mMutex);
1265 return -EINVAL;
1266 }
1267 /* This override is possible since the f/w gaurantees that the ZSL
1268 stream will always be the active array size in case of Bidirectional
1269 or will be limited to the max i/p stream size which we can control to
1270 be equal to be the largest YUV/Opaque stream size
1271 */
1272 if (mPictureChannel) {
1273 mPictureChannel->overrideYuvSize(zslStream->width, zslStream->height);
1274 }
1275 } else if (mPictureChannel && m_bIs4KVideo) {
1276 mPictureChannel->overrideYuvSize(videoWidth, videoHeight);
1277 }
1278
1279 if (isSupportChannelNeeded(streamList, stream_config_info)) {
1280 mSupportChannel = new QCamera3SupportChannel(
1281 mCameraHandle->camera_handle,
1282 mCameraHandle->ops,
1283 &gCamCapability[mCameraId]->padding_info,
1284 CAM_QCOM_FEATURE_NONE,
1285 this);
1286 if (!mSupportChannel) {
1287 ALOGE("%s: dummy channel cannot be created", __func__);
1288 pthread_mutex_unlock(&mMutex);
1289 return -ENOMEM;
1290 }
1291 }
1292
1293 //RAW DUMP channel
1294 if (mEnableRawDump && isRawStreamRequested == false){
1295 cam_dimension_t rawDumpSize;
1296 rawDumpSize = getMaxRawSize(mCameraId);
1297 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
1298 mCameraHandle->ops,
1299 rawDumpSize,
1300 &gCamCapability[mCameraId]->padding_info,
1301 this, CAM_QCOM_FEATURE_NONE);
1302 if (!mRawDumpChannel) {
1303 ALOGE("%s: Raw Dump channel cannot be created", __func__);
1304 pthread_mutex_unlock(&mMutex);
1305 return -ENOMEM;
1306 }
1307 }
1308
1309
1310 if (mSupportChannel) {
1311 stream_config_info.stream_sizes[stream_config_info.num_streams] =
1312 QCamera3SupportChannel::kDim;
1313 stream_config_info.type[stream_config_info.num_streams] =
1314 CAM_STREAM_TYPE_CALLBACK;
1315 stream_config_info.postprocess_mask[stream_config_info.num_streams] =
1316 CAM_QCOM_FEATURE_PP_SUPERSET;
1317 stream_config_info.num_streams++;
1318 }
1319
1320 if (mRawDumpChannel) {
1321 cam_dimension_t rawSize;
1322 rawSize = getMaxRawSize(mCameraId);
1323 stream_config_info.stream_sizes[stream_config_info.num_streams] =
1324 rawSize;
1325 stream_config_info.type[stream_config_info.num_streams] =
1326 CAM_STREAM_TYPE_RAW;
1327 stream_config_info.num_streams++;
1328 }
1329
1330 // settings/parameters don't carry over for new configureStreams
1331 int32_t hal_version = CAM_HAL_V3;
1332 memset(mParameters, 0, sizeof(metadata_buffer_t));
1333
1334 AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1335 sizeof(hal_version), &hal_version);
1336
1337 AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
1338 sizeof(cam_stream_size_info_t), &stream_config_info);
1339
1340 int32_t tintless_value = 1;
1341 AddSetParmEntryToBatch(mParameters,CAM_INTF_PARM_TINTLESS,
1342 sizeof(tintless_value), &tintless_value);
1343
1344 mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1345
1346 /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
1347 mPendingRequestsList.clear();
1348 mPendingFrameDropList.clear();
1349 // Initialize/Reset the pending buffers list
1350 mPendingBuffersMap.num_buffers = 0;
1351 mPendingBuffersMap.mPendingBufferList.clear();
1352 mPendingReprocessResultList.clear();
1353
1354 mFirstRequest = true;
1355
1356 //Get min frame duration for this streams configuration
1357 deriveMinFrameDuration();
1358
1359 pthread_mutex_unlock(&mMutex);
1360 return rc;
1361 }
1362
1363 /*===========================================================================
1364 * FUNCTION : validateCaptureRequest
1365 *
1366 * DESCRIPTION: validate a capture request from camera service
1367 *
1368 * PARAMETERS :
1369 * @request : request from framework to process
1370 *
1371 * RETURN :
1372 *
1373 *==========================================================================*/
validateCaptureRequest(camera3_capture_request_t * request)1374 int QCamera3HardwareInterface::validateCaptureRequest(
1375 camera3_capture_request_t *request)
1376 {
1377 ssize_t idx = 0;
1378 const camera3_stream_buffer_t *b;
1379 CameraMetadata meta;
1380
1381 /* Sanity check the request */
1382 if (request == NULL) {
1383 ALOGE("%s: NULL capture request", __func__);
1384 return BAD_VALUE;
1385 }
1386
1387 if (request->settings == NULL && mFirstRequest) {
1388 /*settings cannot be null for the first request*/
1389 return BAD_VALUE;
1390 }
1391
1392 uint32_t frameNumber = request->frame_number;
1393 if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
1394 ALOGE("%s: Request %d: No output buffers provided!",
1395 __FUNCTION__, frameNumber);
1396 return BAD_VALUE;
1397 }
1398 if (request->input_buffer != NULL) {
1399 b = request->input_buffer;
1400 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1401 ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
1402 __func__, frameNumber, (long)idx);
1403 return BAD_VALUE;
1404 }
1405 if (b->release_fence != -1) {
1406 ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
1407 __func__, frameNumber, (long)idx);
1408 return BAD_VALUE;
1409 }
1410 if (b->buffer == NULL) {
1411 ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
1412 __func__, frameNumber, (long)idx);
1413 return BAD_VALUE;
1414 }
1415 }
1416
1417 // Validate all buffers
1418 b = request->output_buffers;
1419 do {
1420 QCamera3Channel *channel =
1421 static_cast<QCamera3Channel*>(b->stream->priv);
1422 if (channel == NULL) {
1423 ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
1424 __func__, frameNumber, (long)idx);
1425 return BAD_VALUE;
1426 }
1427 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1428 ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
1429 __func__, frameNumber, (long)idx);
1430 return BAD_VALUE;
1431 }
1432 if (b->release_fence != -1) {
1433 ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
1434 __func__, frameNumber, (long)idx);
1435 return BAD_VALUE;
1436 }
1437 if (b->buffer == NULL) {
1438 ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
1439 __func__, frameNumber, (long)idx);
1440 return BAD_VALUE;
1441 }
1442 if (*(b->buffer) == NULL) {
1443 ALOGE("%s: Request %d: Buffer %ld: NULL private handle!",
1444 __func__, frameNumber, (long)idx);
1445 return BAD_VALUE;
1446 }
1447 idx++;
1448 b = request->output_buffers + idx;
1449 } while (idx < (ssize_t)request->num_output_buffers);
1450
1451 return NO_ERROR;
1452 }
1453
1454 /*===========================================================================
1455 * FUNCTION : deriveMinFrameDuration
1456 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
1458 * on currently configured streams.
1459 *
1460 * PARAMETERS : NONE
1461 *
1462 * RETURN : NONE
1463 *
1464 *==========================================================================*/
deriveMinFrameDuration()1465 void QCamera3HardwareInterface::deriveMinFrameDuration()
1466 {
1467 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
1468
1469 maxJpegDim = 0;
1470 maxProcessedDim = 0;
1471 maxRawDim = 0;
1472
1473 // Figure out maximum jpeg, processed, and raw dimensions
1474 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1475 it != mStreamInfo.end(); it++) {
1476
1477 // Input stream doesn't have valid stream_type
1478 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
1479 continue;
1480
1481 int32_t dimension = (*it)->stream->width * (*it)->stream->height;
1482 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1483 if (dimension > maxJpegDim)
1484 maxJpegDim = dimension;
1485 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
1486 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
1487 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
1488 if (dimension > maxRawDim)
1489 maxRawDim = dimension;
1490 } else {
1491 if (dimension > maxProcessedDim)
1492 maxProcessedDim = dimension;
1493 }
1494 }
1495
1496 //Assume all jpeg dimensions are in processed dimensions.
1497 if (maxJpegDim > maxProcessedDim)
1498 maxProcessedDim = maxJpegDim;
1499 //Find the smallest raw dimension that is greater or equal to jpeg dimension
1500 if (maxProcessedDim > maxRawDim) {
1501 maxRawDim = INT32_MAX;
1502 for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
1503 i++) {
1504
1505 int32_t dimension =
1506 gCamCapability[mCameraId]->raw_dim[i].width *
1507 gCamCapability[mCameraId]->raw_dim[i].height;
1508
1509 if (dimension >= maxProcessedDim && dimension < maxRawDim)
1510 maxRawDim = dimension;
1511 }
1512 }
1513
1514 //Find minimum durations for processed, jpeg, and raw
1515 for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
1516 i++) {
1517 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
1518 gCamCapability[mCameraId]->raw_dim[i].height) {
1519 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
1520 break;
1521 }
1522 }
1523 for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1524 if (maxProcessedDim ==
1525 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
1526 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
1527 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
1528 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
1529 break;
1530 }
1531 }
1532 }
1533
1534 /*===========================================================================
1535 * FUNCTION : getMinFrameDuration
1536 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
1543 *
1544 *==========================================================================*/
getMinFrameDuration(const camera3_capture_request_t * request)1545 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
1546 {
1547 bool hasJpegStream = false;
1548 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
1549 const camera3_stream_t *stream = request->output_buffers[i].stream;
1550 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
1551 hasJpegStream = true;
1552 }
1553
1554 if (!hasJpegStream)
1555 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
1556 else
1557 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
1558 }
1559
1560 /*===========================================================================
1561 * FUNCTION : handlePendingReprocResults
1562 *
1563 * DESCRIPTION: check and notify on any pending reprocess results
1564 *
1565 * PARAMETERS :
1566 * @frame_number : Pending request frame number
1567 *
1568 * RETURN : int32_t type of status
1569 * NO_ERROR -- success
 *              non-zero failure code
1571 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Scan the delayed reprocess results for one matching this frame number.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the stored notify message before the capture result.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            CDBG("%s: Delayed reprocess notify %d", __func__,
                    frame_number);

            // Locate the matching pending request so its input buffer and
            // settings can be attached to the outgoing capture result.
            for (List<PendingRequestInfo>::iterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    CDBG("%s: Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!", __func__,
                            k->frame_number);

                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    // A reprocess result carries exactly one output buffer.
                    result.num_output_buffers = 1;
                    result.output_buffers = &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // Erase only after the callback: 'result' points at
                    // fields of *j and *k until process_capture_result returns.
                    mPendingRequestsList.erase(k);
                    mPendingRequest--;
                    break;
                }
            }
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    // Always reports success; an unmatched frame number is simply a no-op.
    return NO_ERROR;
}
1611
1612 /*===========================================================================
1613 * FUNCTION : handleMetadataWithLock
1614 *
1615 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
1616 *
1617 * PARAMETERS : @metadata_buf: metadata buffer
1618 *
1619 * RETURN :
1620 *
1621 *==========================================================================*/
handleMetadataWithLock(mm_camera_super_buf_t * metadata_buf)1622 void QCamera3HardwareInterface::handleMetadataWithLock(
1623 mm_camera_super_buf_t *metadata_buf)
1624 {
1625 ATRACE_CALL();
1626
1627 int32_t frame_number_valid = 0;
1628 uint32_t frame_number = 0;
1629 int64_t capture_time = 0;
1630 int32_t urgent_frame_number_valid = 0;
1631 uint32_t urgent_frame_number = 0;
1632
1633 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
1634 cam_frame_dropped_t cam_frame_drop =
1635 *(cam_frame_dropped_t *) POINTER_OF_META(CAM_INTF_META_FRAME_DROPPED, metadata);
1636
1637 int32_t *p_frame_number_valid =
1638 (int32_t *) POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
1639 uint32_t *p_frame_number =
1640 (uint32_t *) POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
1641 int64_t *p_capture_time =
1642 (int64_t *) POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
1643 int32_t *p_urgent_frame_number_valid =
1644 (int32_t *) POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
1645 uint32_t *p_urgent_frame_number =
1646 (uint32_t *) POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
1647
1648 if ((NULL == p_frame_number_valid) ||
1649 (NULL == p_frame_number) ||
1650 (NULL == p_capture_time) ||
1651 (NULL == p_urgent_frame_number_valid) ||
1652 (NULL == p_urgent_frame_number))
1653 {
1654 mMetadataChannel->bufDone(metadata_buf);
1655 free(metadata_buf);
1656 goto done_metadata;
1657 }
1658 else
1659 {
1660 frame_number_valid = *p_frame_number_valid;
1661 frame_number = *p_frame_number;
1662 capture_time = *p_capture_time;
1663 urgent_frame_number_valid = *p_urgent_frame_number_valid;
1664 urgent_frame_number = *p_urgent_frame_number;
1665 }
1666
1667 if (urgent_frame_number_valid) {
1668 CDBG("%s: valid urgent frame_number = %d, capture_time = %lld",
1669 __func__, urgent_frame_number, capture_time);
1670
1671 //Recieved an urgent Frame Number, handle it
1672 //using partial results
1673 for (List<PendingRequestInfo>::iterator i =
1674 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
1675 CDBG("%s: Iterator Frame = %d urgent frame = %d",
1676 __func__, i->frame_number, urgent_frame_number);
1677
1678 if (i->frame_number < urgent_frame_number &&
1679 i->partial_result_cnt == 0) {
1680 ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
1681 __func__, i->frame_number);
1682 }
1683
1684 if (i->frame_number == urgent_frame_number &&
1685 i->bUrgentReceived == 0) {
1686
1687 camera3_capture_result_t result;
1688 memset(&result, 0, sizeof(camera3_capture_result_t));
1689
1690 i->partial_result_cnt++;
1691 i->bUrgentReceived = 1;
1692 // Extract 3A metadata
1693 result.result =
1694 translateCbUrgentMetadataToResultMetadata(metadata);
1695
1696 if (result.result == NULL)
1697 {
1698 CameraMetadata dummyMetadata;
1699 dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1700 &i->timestamp, 1);
1701 dummyMetadata.update(ANDROID_REQUEST_ID,
1702 &(i->request_id), 1);
1703 result.result = dummyMetadata.release();
1704 }
1705
1706 // Populate metadata result
1707 result.frame_number = urgent_frame_number;
1708 result.num_output_buffers = 0;
1709 result.output_buffers = NULL;
1710 result.partial_result = i->partial_result_cnt;
1711
1712 mCallbackOps->process_capture_result(mCallbackOps, &result);
1713 CDBG("%s: urgent frame_number = %d, capture_time = %lld",
1714 __func__, result.frame_number, capture_time);
1715 free_camera_metadata((camera_metadata_t *)result.result);
1716 break;
1717 }
1718 }
1719 }
1720
1721 if (!frame_number_valid) {
1722 CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
1723 mMetadataChannel->bufDone(metadata_buf);
1724 free(metadata_buf);
1725 goto done_metadata;
1726 }
1727 CDBG("%s: valid frame_number = %d, capture_time = %lld", __func__,
1728 frame_number, capture_time);
1729
1730 // Go through the pending requests info and send shutter/results to frameworks
1731 for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1732 i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
1733 camera3_capture_result_t result;
1734 memset(&result, 0, sizeof(camera3_capture_result_t));
1735
1736 CDBG("%s: frame_number in the list is %d", __func__, i->frame_number);
1737 i->partial_result_cnt++;
1738 result.partial_result = i->partial_result_cnt;
1739
1740 // Flush out all entries with less or equal frame numbers.
1741 mPendingRequest--;
1742
1743 // Check whether any stream buffer corresponding to this is dropped or not
1744 // If dropped, then send the ERROR_BUFFER for the corresponding stream
1745 if (cam_frame_drop.frame_dropped) {
1746 camera3_notify_msg_t notify_msg;
1747 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1748 j != i->buffers.end(); j++) {
1749 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1750 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
1751 for (uint32_t k = 0; k < cam_frame_drop.cam_stream_ID.num_streams; k++) {
1752 if (streamID == cam_frame_drop.cam_stream_ID.streamID[k]) {
1753 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
1754 CDBG("%s: Start of reporting error frame#=%d, streamID=%d",
1755 __func__, i->frame_number, streamID);
1756 notify_msg.type = CAMERA3_MSG_ERROR;
1757 notify_msg.message.error.frame_number = i->frame_number;
1758 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
1759 notify_msg.message.error.error_stream = j->stream;
1760 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
1761 CDBG("%s: End of reporting error frame#=%d, streamID=%d",
1762 __func__, i->frame_number, streamID);
1763 PendingFrameDropInfo PendingFrameDrop;
1764 PendingFrameDrop.frame_number=i->frame_number;
1765 PendingFrameDrop.stream_ID = streamID;
1766 // Add the Frame drop info to mPendingFrameDropList
1767 mPendingFrameDropList.push_back(PendingFrameDrop);
1768 }
1769 }
1770 }
1771 }
1772
1773 // Send empty metadata with already filled buffers for dropped metadata
1774 // and send valid metadata with already filled buffers for current metadata
1775 if (i->frame_number < frame_number) {
1776 camera3_notify_msg_t notify_msg;
1777 notify_msg.type = CAMERA3_MSG_SHUTTER;
1778 notify_msg.message.shutter.frame_number = i->frame_number;
1779 notify_msg.message.shutter.timestamp = capture_time -
1780 (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
1781 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
1782 i->timestamp = notify_msg.message.shutter.timestamp;
1783 CDBG("%s: Support notification !!!! notify frame_number = %d, capture_time = %lld",
1784 __func__, i->frame_number, notify_msg.message.shutter.timestamp);
1785
1786 CameraMetadata dummyMetadata;
1787 dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
1788 &i->timestamp, 1);
1789 dummyMetadata.update(ANDROID_REQUEST_ID,
1790 &(i->request_id), 1);
1791 result.result = dummyMetadata.release();
1792 } else {
1793 // Send shutter notify to frameworks
1794 camera3_notify_msg_t notify_msg;
1795 notify_msg.type = CAMERA3_MSG_SHUTTER;
1796 notify_msg.message.shutter.frame_number = i->frame_number;
1797 notify_msg.message.shutter.timestamp = capture_time;
1798 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
1799
1800 i->timestamp = capture_time;
1801
1802 result.result = translateFromHalMetadata(metadata,
1803 i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
1804 i->capture_intent);
1805
1806 if (i->blob_request) {
1807 {
1808 //Dump tuning metadata if enabled and available
1809 char prop[PROPERTY_VALUE_MAX];
1810 memset(prop, 0, sizeof(prop));
1811 property_get("persist.camera.dumpmetadata", prop, "0");
1812 int32_t enabled = atoi(prop);
1813 if (enabled && metadata->is_tuning_params_valid) {
1814 dumpMetadataToFile(metadata->tuning_params,
1815 mMetaFrameCount,
1816 enabled,
1817 "Snapshot",
1818 frame_number);
1819 }
1820 }
1821
1822
1823 mPictureChannel->queueReprocMetadata(metadata_buf);
1824 } else {
1825 // Return metadata buffer
1826 mMetadataChannel->bufDone(metadata_buf);
1827 free(metadata_buf);
1828 }
1829 }
1830 if (!result.result) {
1831 ALOGE("%s: metadata is NULL", __func__);
1832 }
1833 result.frame_number = i->frame_number;
1834 result.num_output_buffers = 0;
1835 result.output_buffers = NULL;
1836 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1837 j != i->buffers.end(); j++) {
1838 if (j->buffer) {
1839 result.num_output_buffers++;
1840 }
1841 }
1842
1843 if (result.num_output_buffers > 0) {
1844 camera3_stream_buffer_t *result_buffers =
1845 new camera3_stream_buffer_t[result.num_output_buffers];
1846 if (!result_buffers) {
1847 ALOGE("%s: Fatal error: out of memory", __func__);
1848 }
1849 size_t result_buffers_idx = 0;
1850 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
1851 j != i->buffers.end(); j++) {
1852 if (j->buffer) {
1853 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1854 m != mPendingFrameDropList.end(); m++) {
1855 QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
1856 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
1857 if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
1858 j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1859 CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
1860 __func__, frame_number, streamID);
1861 m = mPendingFrameDropList.erase(m);
1862 break;
1863 }
1864 }
1865
1866 for (List<PendingBufferInfo>::iterator k =
1867 mPendingBuffersMap.mPendingBufferList.begin();
1868 k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
1869 if (k->buffer == j->buffer->buffer) {
1870 CDBG("%s: Found buffer %p in pending buffer List "
1871 "for frame %d, Take it out!!", __func__,
1872 k->buffer, k->frame_number);
1873 mPendingBuffersMap.num_buffers--;
1874 k = mPendingBuffersMap.mPendingBufferList.erase(k);
1875 break;
1876 }
1877 }
1878
1879 result_buffers[result_buffers_idx++] = *(j->buffer);
1880 free(j->buffer);
1881 j->buffer = NULL;
1882 }
1883 }
1884 result.output_buffers = result_buffers;
1885 mCallbackOps->process_capture_result(mCallbackOps, &result);
1886 CDBG("%s: meta frame_number = %d, capture_time = %lld",
1887 __func__, result.frame_number, i->timestamp);
1888 free_camera_metadata((camera_metadata_t *)result.result);
1889 delete[] result_buffers;
1890 } else {
1891 mCallbackOps->process_capture_result(mCallbackOps, &result);
1892 CDBG("%s: meta frame_number = %d, capture_time = %lld",
1893 __func__, result.frame_number, i->timestamp);
1894 free_camera_metadata((camera_metadata_t *)result.result);
1895 }
1896 // erase the element from the list
1897 i = mPendingRequestsList.erase(i);
1898
1899 if (!mPendingReprocessResultList.empty()) {
1900 handlePendingReprocResults(frame_number + 1);
1901 }
1902 }
1903
1904 done_metadata:
1905 for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1906 i != mPendingRequestsList.end() ;i++) {
1907 i->pipeline_depth++;
1908 }
1909 unblockRequestIfNecessary();
1910
1911 }
1912
1913 /*===========================================================================
1914 * FUNCTION : handleBufferWithLock
1915 *
1916 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
1917 *
1918 * PARAMETERS : @buffer: image buffer for the callback
1919 * @frame_number: frame number of the image buffer
1920 *
1921 * RETURN :
1922 *
1923 *==========================================================================*/
handleBufferWithLock(camera3_stream_buffer_t * buffer,uint32_t frame_number)1924 void QCamera3HardwareInterface::handleBufferWithLock(
1925 camera3_stream_buffer_t *buffer, uint32_t frame_number)
1926 {
1927 ATRACE_CALL();
1928 // If the frame number doesn't exist in the pending request list,
1929 // directly send the buffer to the frameworks, and update pending buffers map
1930 // Otherwise, book-keep the buffer.
1931 List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1932 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
1933 i++;
1934 }
1935 if (i == mPendingRequestsList.end()) {
1936 // Verify all pending requests frame_numbers are greater
1937 for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
1938 j != mPendingRequestsList.end(); j++) {
1939 if (j->frame_number < frame_number) {
1940 ALOGE("%s: Error: pending frame number %d is smaller than %d",
1941 __func__, j->frame_number, frame_number);
1942 }
1943 }
1944 camera3_capture_result_t result;
1945 memset(&result, 0, sizeof(camera3_capture_result_t));
1946 result.result = NULL;
1947 result.frame_number = frame_number;
1948 result.num_output_buffers = 1;
1949 result.partial_result = 0;
1950 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
1951 m != mPendingFrameDropList.end(); m++) {
1952 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
1953 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
1954 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
1955 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
1956 CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
1957 __func__, frame_number, streamID);
1958 m = mPendingFrameDropList.erase(m);
1959 break;
1960 }
1961 }
1962 result.output_buffers = buffer;
1963 CDBG("%s: result frame_number = %d, buffer = %p",
1964 __func__, frame_number, buffer->buffer);
1965
1966 for (List<PendingBufferInfo>::iterator k =
1967 mPendingBuffersMap.mPendingBufferList.begin();
1968 k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
1969 if (k->buffer == buffer->buffer) {
1970 CDBG("%s: Found Frame buffer, take it out from list",
1971 __func__);
1972
1973 mPendingBuffersMap.num_buffers--;
1974 k = mPendingBuffersMap.mPendingBufferList.erase(k);
1975 break;
1976 }
1977 }
1978 CDBG("%s: mPendingBuffersMap.num_buffers = %d",
1979 __func__, mPendingBuffersMap.num_buffers);
1980
1981 mCallbackOps->process_capture_result(mCallbackOps, &result);
1982 } else {
1983 if (i->input_buffer) {
1984 CameraMetadata settings;
1985 camera3_notify_msg_t notify_msg;
1986 memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t));
1987 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
1988 if(i->settings) {
1989 settings = i->settings;
1990 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
1991 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
1992 } else {
1993 ALOGE("%s: No timestamp in input settings! Using current one.",
1994 __func__);
1995 }
1996 } else {
1997 ALOGE("%s: Input settings missing!", __func__);
1998 }
1999
2000 notify_msg.type = CAMERA3_MSG_SHUTTER;
2001 notify_msg.message.shutter.frame_number = frame_number;
2002 notify_msg.message.shutter.timestamp = capture_time;
2003
2004 if (i->input_buffer->release_fence != -1) {
2005 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
2006 close(i->input_buffer->release_fence);
2007 if (rc != OK) {
2008 ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
2009 }
2010 }
2011
2012 for (List<PendingBufferInfo>::iterator k =
2013 mPendingBuffersMap.mPendingBufferList.begin();
2014 k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
2015 if (k->buffer == buffer->buffer) {
2016 CDBG("%s: Found Frame buffer, take it out from list",
2017 __func__);
2018
2019 mPendingBuffersMap.num_buffers--;
2020 k = mPendingBuffersMap.mPendingBufferList.erase(k);
2021 break;
2022 }
2023 }
2024 CDBG("%s: mPendingBuffersMap.num_buffers = %d",
2025 __func__, mPendingBuffersMap.num_buffers);
2026
2027 bool notifyNow = true;
2028 for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
2029 j != mPendingRequestsList.end(); j++) {
2030 if (j->frame_number < frame_number) {
2031 notifyNow = false;
2032 break;
2033 }
2034 }
2035
2036 if (notifyNow) {
2037 camera3_capture_result result;
2038 memset(&result, 0, sizeof(camera3_capture_result));
2039 result.frame_number = frame_number;
2040 result.result = i->settings;
2041 result.input_buffer = i->input_buffer;
2042 result.num_output_buffers = 1;
2043 result.output_buffers = buffer;
2044 result.partial_result = PARTIAL_RESULT_COUNT;
2045
2046 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
2047 mCallbackOps->process_capture_result(mCallbackOps, &result);
2048 CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
2049 i = mPendingRequestsList.erase(i);
2050 mPendingRequest--;
2051 } else {
2052 // Cache reprocess result for later
2053 PendingReprocessResult pendingResult;
2054 memset(&pendingResult, 0, sizeof(PendingReprocessResult));
2055 pendingResult.notify_msg = notify_msg;
2056 pendingResult.buffer = *buffer;
2057 pendingResult.frame_number = frame_number;
2058 mPendingReprocessResultList.push_back(pendingResult);
2059 CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
2060 }
2061 } else {
2062 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2063 j != i->buffers.end(); j++) {
2064 if (j->stream == buffer->stream) {
2065 if (j->buffer != NULL) {
2066 ALOGE("%s: Error: buffer is already set", __func__);
2067 } else {
2068 j->buffer = (camera3_stream_buffer_t *)malloc(
2069 sizeof(camera3_stream_buffer_t));
2070 *(j->buffer) = *buffer;
2071 CDBG("%s: cache buffer %p at result frame_number %d",
2072 __func__, buffer, frame_number);
2073 }
2074 }
2075 }
2076 }
2077 }
2078 }
2079
2080 /*===========================================================================
2081 * FUNCTION : unblockRequestIfNecessary
2082 *
2083 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
2084 * that mMutex is held when this function is called.
2085 *
2086 * PARAMETERS :
2087 *
2088 * RETURN :
2089 *
2090 *==========================================================================*/
unblockRequestIfNecessary()2091 void QCamera3HardwareInterface::unblockRequestIfNecessary()
2092 {
2093 // Unblock process_capture_request
2094 pthread_cond_signal(&mRequestCond);
2095 }
2096
2097 /*===========================================================================
2098 * FUNCTION : processCaptureRequest
2099 *
2100 * DESCRIPTION: process a capture request from camera service
2101 *
2102 * PARAMETERS :
2103 * @request : request from framework to process
2104 *
2105 * RETURN :
2106 *
2107 *==========================================================================*/
processCaptureRequest(camera3_capture_request_t * request)2108 int QCamera3HardwareInterface::processCaptureRequest(
2109 camera3_capture_request_t *request)
2110 {
2111 ATRACE_CALL();
2112 int rc = NO_ERROR;
2113 int32_t request_id;
2114 CameraMetadata meta;
2115
2116 pthread_mutex_lock(&mMutex);
2117
2118 rc = validateCaptureRequest(request);
2119 if (rc != NO_ERROR) {
2120 ALOGE("%s: incoming request is not valid", __func__);
2121 pthread_mutex_unlock(&mMutex);
2122 return rc;
2123 }
2124
2125 meta = request->settings;
2126
2127 // For first capture request, send capture intent, and
2128 // stream on all streams
2129 if (mFirstRequest) {
2130
2131 /* get eis information for stream configuration */
2132 cam_is_type_t is_type;
2133 char is_type_value[PROPERTY_VALUE_MAX];
2134 property_get("camera.is_type", is_type_value, "0");
2135 is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
2136
2137 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2138 int32_t hal_version = CAM_HAL_V3;
2139 uint8_t captureIntent =
2140 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2141 mCaptureIntent = captureIntent;
2142 memset(mParameters, 0, sizeof(parm_buffer_t));
2143 AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
2144 sizeof(hal_version), &hal_version);
2145 AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
2146 sizeof(captureIntent), &captureIntent);
2147 }
2148
2149 //If EIS is enabled, turn it on for video
2150 //for camera use case, front camcorder and 4k video, no eis
2151 bool setEis = mEisEnable && (gCamCapability[mCameraId]->position == CAM_POSITION_BACK &&
2152 (mCaptureIntent == CAMERA3_TEMPLATE_VIDEO_RECORD ||
2153 mCaptureIntent == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT));
2154 int32_t vsMode;
2155 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
2156 rc = AddSetParmEntryToBatch(mParameters,
2157 CAM_INTF_PARM_DIS_ENABLE,
2158 sizeof(vsMode), &vsMode);
2159
2160 //IS type will be 0 unless EIS is supported. If EIS is supported
2161 //it could either be 1 or 4 depending on the stream and video size
2162 if (setEis){
2163 if (m_bIs4KVideo) {
2164 is_type = IS_TYPE_DIS;
2165 } else {
2166 is_type = IS_TYPE_EIS_2_0;
2167 }
2168 }
2169
2170 for (size_t i = 0; i < request->num_output_buffers; i++) {
2171 const camera3_stream_buffer_t& output = request->output_buffers[i];
2172 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
2173 /*for livesnapshot stream is_type will be DIS*/
2174 if (setEis && output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
2175 rc = channel->registerBuffer(output.buffer, IS_TYPE_DIS);
2176 } else {
2177 rc = channel->registerBuffer(output.buffer, is_type);
2178 }
2179 if (rc < 0) {
2180 ALOGE("%s: registerBuffer failed",
2181 __func__);
2182 pthread_mutex_unlock(&mMutex);
2183 return -ENODEV;
2184 }
2185 }
2186
2187 /*set the capture intent, hal version and dis enable parameters to the backend*/
2188 mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
2189 mParameters);
2190
2191
2192 //First initialize all streams
2193 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2194 it != mStreamInfo.end(); it++) {
2195 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2196 if (setEis && (*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2197 rc = channel->initialize(IS_TYPE_DIS);
2198 } else {
2199 rc = channel->initialize(is_type);
2200 }
2201 if (NO_ERROR != rc) {
2202 ALOGE("%s : Channel initialization failed %d", __func__, rc);
2203 pthread_mutex_unlock(&mMutex);
2204 return rc;
2205 }
2206 }
2207
2208 if (mRawDumpChannel) {
2209 rc = mRawDumpChannel->initialize(is_type);
2210 if (rc != NO_ERROR) {
2211 ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
2212 pthread_mutex_unlock(&mMutex);
2213 return rc;
2214 }
2215 }
2216 if (mSupportChannel) {
2217 rc = mSupportChannel->initialize(is_type);
2218 if (rc < 0) {
2219 ALOGE("%s: Support channel initialization failed", __func__);
2220 pthread_mutex_unlock(&mMutex);
2221 return rc;
2222 }
2223 }
2224
2225 //Then start them.
2226 CDBG_HIGH("%s: Start META Channel", __func__);
2227 rc = mMetadataChannel->start();
2228 if (rc < 0) {
2229 ALOGE("%s: META channel start failed", __func__);
2230 pthread_mutex_unlock(&mMutex);
2231 return rc;
2232 }
2233
2234 if (mSupportChannel) {
2235 rc = mSupportChannel->start();
2236 if (rc < 0) {
2237 ALOGE("%s: Support channel start failed", __func__);
2238 mMetadataChannel->stop();
2239 pthread_mutex_unlock(&mMutex);
2240 return rc;
2241 }
2242 }
2243 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2244 it != mStreamInfo.end(); it++) {
2245 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2246 CDBG_HIGH("%s: Start Regular Channel mask=%d", __func__, channel->getStreamTypeMask());
2247 rc = channel->start();
2248 if (rc < 0) {
2249 ALOGE("%s: channel start failed", __func__);
2250 pthread_mutex_unlock(&mMutex);
2251 return rc;
2252 }
2253 }
2254
2255 if (mRawDumpChannel) {
2256 CDBG("%s: Starting raw dump stream",__func__);
2257 rc = mRawDumpChannel->start();
2258 if (rc != NO_ERROR) {
2259 ALOGE("%s: Error Starting Raw Dump Channel", __func__);
2260 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2261 it != mStreamInfo.end(); it++) {
2262 QCamera3Channel *channel =
2263 (QCamera3Channel *)(*it)->stream->priv;
2264 ALOGE("%s: Stopping Regular Channel mask=%d", __func__,
2265 channel->getStreamTypeMask());
2266 channel->stop();
2267 }
2268 if (mSupportChannel)
2269 mSupportChannel->stop();
2270 mMetadataChannel->stop();
2271 pthread_mutex_unlock(&mMutex);
2272 return rc;
2273 }
2274 }
2275 mWokenUpByDaemon = false;
2276 mPendingRequest = 0;
2277 }
2278
2279 uint32_t frameNumber = request->frame_number;
2280 cam_stream_ID_t streamID;
2281
2282 if (meta.exists(ANDROID_REQUEST_ID)) {
2283 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
2284 mCurrentRequestId = request_id;
2285 CDBG("%s: Received request with id: %d",__func__, request_id);
2286 } else if (mFirstRequest || mCurrentRequestId == -1){
2287 ALOGE("%s: Unable to find request id field, \
2288 & no previous id available", __func__);
2289 return NAME_NOT_FOUND;
2290 } else {
2291 CDBG("%s: Re-using old request id", __func__);
2292 request_id = mCurrentRequestId;
2293 }
2294
2295 CDBG("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
2296 __func__, __LINE__,
2297 request->num_output_buffers,
2298 request->input_buffer,
2299 frameNumber);
2300 // Acquire all request buffers first
2301 streamID.num_streams = 0;
2302 int blob_request = 0;
2303 uint32_t snapshotStreamId = 0;
2304 for (size_t i = 0; i < request->num_output_buffers; i++) {
2305 const camera3_stream_buffer_t& output = request->output_buffers[i];
2306 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
2307
2308 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
2309 //Call function to store local copy of jpeg data for encode params.
2310 blob_request = 1;
2311 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
2312 }
2313
2314 if (output.acquire_fence != -1) {
2315 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
2316 close(output.acquire_fence);
2317 if (rc != OK) {
2318 ALOGE("%s: sync wait failed %d", __func__, rc);
2319 pthread_mutex_unlock(&mMutex);
2320 return rc;
2321 }
2322 }
2323
2324 streamID.streamID[streamID.num_streams] =
2325 channel->getStreamID(channel->getStreamTypeMask());
2326 streamID.num_streams++;
2327
2328
2329 }
2330
2331 if (blob_request && mRawDumpChannel) {
2332 CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
2333 streamID.streamID[streamID.num_streams] =
2334 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
2335 streamID.num_streams++;
2336 }
2337
2338 if(request->input_buffer == NULL) {
2339 rc = setFrameParameters(request, streamID, snapshotStreamId);
2340 if (rc < 0) {
2341 ALOGE("%s: fail to set frame parameters", __func__);
2342 pthread_mutex_unlock(&mMutex);
2343 return rc;
2344 }
2345 } else {
2346
2347 if (request->input_buffer->acquire_fence != -1) {
2348 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
2349 close(request->input_buffer->acquire_fence);
2350 if (rc != OK) {
2351 ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
2352 pthread_mutex_unlock(&mMutex);
2353 return rc;
2354 }
2355 }
2356 }
2357
2358 /* Update pending request list and pending buffers map */
2359 PendingRequestInfo pendingRequest;
2360 pendingRequest.frame_number = frameNumber;
2361 pendingRequest.num_buffers = request->num_output_buffers;
2362 pendingRequest.request_id = request_id;
2363 pendingRequest.blob_request = blob_request;
2364 pendingRequest.bUrgentReceived = 0;
2365
2366 pendingRequest.input_buffer = request->input_buffer;
2367 pendingRequest.settings = request->settings;
2368 pendingRequest.pipeline_depth = 0;
2369 pendingRequest.partial_result_cnt = 0;
2370 extractJpegMetadata(pendingRequest.jpegMetadata, request);
2371
2372 //extract capture intent
2373 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2374 mCaptureIntent =
2375 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2376 }
2377 pendingRequest.capture_intent = mCaptureIntent;
2378
2379 for (size_t i = 0; i < request->num_output_buffers; i++) {
2380 RequestedBufferInfo requestedBuf;
2381 requestedBuf.stream = request->output_buffers[i].stream;
2382 requestedBuf.buffer = NULL;
2383 pendingRequest.buffers.push_back(requestedBuf);
2384
2385 // Add to buffer handle the pending buffers list
2386 PendingBufferInfo bufferInfo;
2387 bufferInfo.frame_number = frameNumber;
2388 bufferInfo.buffer = request->output_buffers[i].buffer;
2389 bufferInfo.stream = request->output_buffers[i].stream;
2390 mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
2391 mPendingBuffersMap.num_buffers++;
2392 CDBG("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
2393 __func__, frameNumber, bufferInfo.buffer, bufferInfo.stream,
2394 bufferInfo.stream->format);
2395 }
2396 CDBG("%s: mPendingBuffersMap.num_buffers = %d",
2397 __func__, mPendingBuffersMap.num_buffers);
2398
2399 mPendingRequestsList.push_back(pendingRequest);
2400
2401 if(mFlush) {
2402 pthread_mutex_unlock(&mMutex);
2403 return NO_ERROR;
2404 }
2405
2406 // Notify metadata channel we receive a request
2407 mMetadataChannel->request(NULL, frameNumber);
2408
2409 metadata_buffer_t reproc_meta;
2410 memset(&reproc_meta, 0, sizeof(metadata_buffer_t));
2411
2412 if(request->input_buffer != NULL){
2413 rc = setReprocParameters(request, &reproc_meta, snapshotStreamId);
2414 if (NO_ERROR != rc) {
2415 ALOGE("%s: fail to set reproc parameters", __func__);
2416 pthread_mutex_unlock(&mMutex);
2417 return rc;
2418 }
2419 }
2420
2421 // Call request on other streams
2422 for (size_t i = 0; i < request->num_output_buffers; i++) {
2423 const camera3_stream_buffer_t& output = request->output_buffers[i];
2424 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
2425
2426 if (channel == NULL) {
2427 ALOGE("%s: invalid channel pointer for stream", __func__);
2428 continue;
2429 }
2430
2431 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
2432 rc = channel->request(output.buffer, frameNumber,
2433 request->input_buffer, (request->input_buffer)? &reproc_meta : mParameters);
2434 if (rc < 0) {
2435 ALOGE("%s: Fail to request on picture channel", __func__);
2436 pthread_mutex_unlock(&mMutex);
2437 return rc;
2438 }
2439 } else {
2440 CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
2441 __LINE__, output.buffer, frameNumber);
2442 rc = channel->request(output.buffer, frameNumber);
2443 }
2444 if (rc < 0)
2445 ALOGE("%s: request failed", __func__);
2446 }
2447
2448 if(request->input_buffer == NULL) {
2449 /*set the parameters to backend*/
2450 mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
2451 }
2452
2453 mFirstRequest = false;
2454 // Added a timed condition wait
2455 struct timespec ts;
2456 uint8_t isValidTimeout = 1;
2457 rc = clock_gettime(CLOCK_REALTIME, &ts);
2458 if (rc < 0) {
2459 isValidTimeout = 0;
2460 ALOGE("%s: Error reading the real time clock!!", __func__);
2461 }
2462 else {
2463 // Make timeout as 5 sec for request to be honored
2464 ts.tv_sec += 5;
2465 }
2466 //Block on conditional variable
2467
2468 mPendingRequest++;
2469 while (mPendingRequest >= MIN_INFLIGHT_REQUESTS) {
2470 if (!isValidTimeout) {
2471 CDBG("%s: Blocking on conditional wait", __func__);
2472 pthread_cond_wait(&mRequestCond, &mMutex);
2473 }
2474 else {
2475 CDBG("%s: Blocking on timed conditional wait", __func__);
2476 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
2477 if (rc == ETIMEDOUT) {
2478 rc = -ENODEV;
2479 ALOGE("%s: Unblocked on timeout!!!!", __func__);
2480 break;
2481 }
2482 }
2483 CDBG("%s: Unblocked", __func__);
2484 if (mWokenUpByDaemon) {
2485 mWokenUpByDaemon = false;
2486 if (mPendingRequest < MAX_INFLIGHT_REQUESTS)
2487 break;
2488 }
2489 }
2490 pthread_mutex_unlock(&mMutex);
2491
2492 return rc;
2493 }
2494
2495 /*===========================================================================
2496 * FUNCTION : dump
2497 *
2498 * DESCRIPTION:
2499 *
2500 * PARAMETERS :
2501 *
2502 *
2503 * RETURN :
2504 *==========================================================================*/
dump(int fd)2505 void QCamera3HardwareInterface::dump(int fd)
2506 {
2507 pthread_mutex_lock(&mMutex);
2508 dprintf(fd, "\n Camera HAL3 information Begin \n");
2509
2510 dprintf(fd, "\nNumber of pending requests: %d \n",
2511 mPendingRequestsList.size());
2512 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
2513 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
2514 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
2515 for(List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
2516 i != mPendingRequestsList.end(); i++) {
2517 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
2518 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
2519 i->input_buffer);
2520 }
2521 dprintf(fd, "\nPending buffer map: Number of buffers: %d\n",
2522 mPendingBuffersMap.num_buffers);
2523 dprintf(fd, "-------+-------------\n");
2524 dprintf(fd, " Frame | Stream type \n");
2525 dprintf(fd, "-------+-------------\n");
2526 for(List<PendingBufferInfo>::iterator i =
2527 mPendingBuffersMap.mPendingBufferList.begin();
2528 i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
2529 dprintf(fd, " %5d | %11d \n",
2530 i->frame_number, i->stream->stream_type);
2531 }
2532 dprintf(fd, "-------+-------------\n");
2533
2534 dprintf(fd, "\nPending frame drop list: %d\n",
2535 mPendingFrameDropList.size());
2536 dprintf(fd, "-------+-----------\n");
2537 dprintf(fd, " Frame | Stream ID \n");
2538 dprintf(fd, "-------+-----------\n");
2539 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
2540 i != mPendingFrameDropList.end(); i++) {
2541 dprintf(fd, " %5d | %9d \n",
2542 i->frame_number, i->stream_ID);
2543 }
2544 dprintf(fd, "-------+-----------\n");
2545
2546 dprintf(fd, "\n Camera HAL3 information End \n");
2547 pthread_mutex_unlock(&mMutex);
2548 return;
2549 }
2550
2551 /*===========================================================================
2552 * FUNCTION : flush
2553 *
2554 * DESCRIPTION:
2555 *
2556 * PARAMETERS :
2557 *
2558 *
2559 * RETURN :
2560 *==========================================================================*/
flush()2561 int QCamera3HardwareInterface::flush()
2562 {
2563 ATRACE_CALL();
2564 unsigned int frameNum = 0;
2565 camera3_notify_msg_t notify_msg;
2566 camera3_capture_result_t result;
2567 camera3_stream_buffer_t *pStream_Buf = NULL;
2568 FlushMap flushMap;
2569
2570 CDBG("%s: Unblocking Process Capture Request", __func__);
2571 pthread_mutex_lock(&mMutex);
2572 mFlush = true;
2573 pthread_mutex_unlock(&mMutex);
2574
2575 memset(&result, 0, sizeof(camera3_capture_result_t));
2576
2577 // Stop the Streams/Channels
2578 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2579 it != mStreamInfo.end(); it++) {
2580 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2581 channel->stop();
2582 (*it)->status = INVALID;
2583 }
2584
2585 if (mSupportChannel) {
2586 mSupportChannel->stop();
2587 }
2588 if (mRawDumpChannel) {
2589 mRawDumpChannel->stop();
2590 }
2591 if (mMetadataChannel) {
2592 /* If content of mStreamInfo is not 0, there is metadata stream */
2593 mMetadataChannel->stop();
2594 }
2595
2596 // Mutex Lock
2597 pthread_mutex_lock(&mMutex);
2598
2599 // Unblock process_capture_request
2600 mPendingRequest = 0;
2601 pthread_cond_signal(&mRequestCond);
2602
2603 List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
2604 frameNum = i->frame_number;
2605 CDBG("%s: Oldest frame num on mPendingRequestsList = %d",
2606 __func__, frameNum);
2607
2608 // Go through the pending buffers and group them depending
2609 // on frame number
2610 for (List<PendingBufferInfo>::iterator k =
2611 mPendingBuffersMap.mPendingBufferList.begin();
2612 k != mPendingBuffersMap.mPendingBufferList.end();) {
2613
2614 if (k->frame_number < frameNum) {
2615 ssize_t idx = flushMap.indexOfKey(k->frame_number);
2616 if (idx == NAME_NOT_FOUND) {
2617 Vector<PendingBufferInfo> pending;
2618 pending.add(*k);
2619 flushMap.add(k->frame_number, pending);
2620 } else {
2621 Vector<PendingBufferInfo> &pending =
2622 flushMap.editValueFor(k->frame_number);
2623 pending.add(*k);
2624 }
2625
2626 mPendingBuffersMap.num_buffers--;
2627 k = mPendingBuffersMap.mPendingBufferList.erase(k);
2628 } else {
2629 k++;
2630 }
2631 }
2632
2633 for (size_t i = 0; i < flushMap.size(); i++) {
2634 uint32_t frame_number = flushMap.keyAt(i);
2635 const Vector<PendingBufferInfo> &pending = flushMap.valueAt(i);
2636
2637 // Send Error notify to frameworks for each buffer for which
2638 // metadata buffer is already sent
2639 CDBG("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
2640 __func__, frame_number, pending.size());
2641
2642 pStream_Buf = new camera3_stream_buffer_t[pending.size()];
2643 if (NULL == pStream_Buf) {
2644 ALOGE("%s: No memory for pending buffers array", __func__);
2645 pthread_mutex_unlock(&mMutex);
2646 return NO_MEMORY;
2647 }
2648
2649 for (size_t j = 0; j < pending.size(); j++) {
2650 const PendingBufferInfo &info = pending.itemAt(j);
2651 notify_msg.type = CAMERA3_MSG_ERROR;
2652 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
2653 notify_msg.message.error.error_stream = info.stream;
2654 notify_msg.message.error.frame_number = frame_number;
2655 pStream_Buf[j].acquire_fence = -1;
2656 pStream_Buf[j].release_fence = -1;
2657 pStream_Buf[j].buffer = info.buffer;
2658 pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
2659 pStream_Buf[j].stream = info.stream;
2660 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
2661 CDBG("%s: notify frame_number = %d stream %p", __func__,
2662 frame_number, info.stream);
2663 }
2664
2665 result.result = NULL;
2666 result.frame_number = frame_number;
2667 result.num_output_buffers = pending.size();
2668 result.output_buffers = pStream_Buf;
2669 mCallbackOps->process_capture_result(mCallbackOps, &result);
2670
2671 delete [] pStream_Buf;
2672 }
2673
2674 CDBG("%s:Sending ERROR REQUEST for all pending requests", __func__);
2675
2676 flushMap.clear();
2677 for (List<PendingBufferInfo>::iterator k =
2678 mPendingBuffersMap.mPendingBufferList.begin();
2679 k != mPendingBuffersMap.mPendingBufferList.end();) {
2680 ssize_t idx = flushMap.indexOfKey(k->frame_number);
2681 if (idx == NAME_NOT_FOUND) {
2682 Vector<PendingBufferInfo> pending;
2683 pending.add(*k);
2684 flushMap.add(k->frame_number, pending);
2685 } else {
2686 Vector<PendingBufferInfo> &pending =
2687 flushMap.editValueFor(k->frame_number);
2688 pending.add(*k);
2689 }
2690
2691 mPendingBuffersMap.num_buffers--;
2692 k = mPendingBuffersMap.mPendingBufferList.erase(k);
2693 }
2694
2695 // Go through the pending requests info and send error request to framework
2696 for (size_t i = 0; i < flushMap.size(); i++) {
2697 uint32_t frame_number = flushMap.keyAt(i);
2698 const Vector<PendingBufferInfo> &pending = flushMap.valueAt(i);
2699 CDBG("%s:Sending ERROR REQUEST for frame %d",
2700 __func__, frame_number);
2701
2702 // Send shutter notify to frameworks
2703 notify_msg.type = CAMERA3_MSG_ERROR;
2704 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
2705 notify_msg.message.error.error_stream = NULL;
2706 notify_msg.message.error.frame_number = frame_number;
2707 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
2708
2709 pStream_Buf = new camera3_stream_buffer_t[pending.size()];
2710 if (NULL == pStream_Buf) {
2711 ALOGE("%s: No memory for pending buffers array", __func__);
2712 pthread_mutex_unlock(&mMutex);
2713 return NO_MEMORY;
2714 }
2715
2716 for (size_t j = 0; j < pending.size(); j++) {
2717 const PendingBufferInfo &info = pending.itemAt(j);
2718 pStream_Buf[j].acquire_fence = -1;
2719 pStream_Buf[j].release_fence = -1;
2720 pStream_Buf[j].buffer = info.buffer;
2721 pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
2722 pStream_Buf[j].stream = info.stream;
2723 }
2724
2725 result.num_output_buffers = pending.size();
2726 result.output_buffers = pStream_Buf;
2727 result.result = NULL;
2728 result.frame_number = frame_number;
2729 mCallbackOps->process_capture_result(mCallbackOps, &result);
2730 delete [] pStream_Buf;
2731 }
2732
2733 /* Reset pending buffer list and requests list */
2734 mPendingRequestsList.clear();
2735 /* Reset pending frame Drop list and requests list */
2736 mPendingFrameDropList.clear();
2737
2738 flushMap.clear();
2739 mPendingBuffersMap.num_buffers = 0;
2740 mPendingBuffersMap.mPendingBufferList.clear();
2741 mPendingReprocessResultList.clear();
2742 CDBG("%s: Cleared all the pending buffers ", __func__);
2743
2744 mFlush = false;
2745
2746 // Start the Streams/Channels
2747 int rc = NO_ERROR;
2748 if (mMetadataChannel) {
2749 /* If content of mStreamInfo is not 0, there is metadata stream */
2750 rc = mMetadataChannel->start();
2751 if (rc < 0) {
2752 ALOGE("%s: META channel start failed", __func__);
2753 pthread_mutex_unlock(&mMutex);
2754 return rc;
2755 }
2756 }
2757 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2758 it != mStreamInfo.end(); it++) {
2759 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2760 rc = channel->start();
2761 if (rc < 0) {
2762 ALOGE("%s: channel start failed", __func__);
2763 pthread_mutex_unlock(&mMutex);
2764 return rc;
2765 }
2766 }
2767 if (mSupportChannel) {
2768 rc = mSupportChannel->start();
2769 if (rc < 0) {
2770 ALOGE("%s: Support channel start failed", __func__);
2771 pthread_mutex_unlock(&mMutex);
2772 return rc;
2773 }
2774 }
2775 if (mRawDumpChannel) {
2776 rc = mRawDumpChannel->start();
2777 if (rc < 0) {
2778 ALOGE("%s: RAW dump channel start failed", __func__);
2779 pthread_mutex_unlock(&mMutex);
2780 return rc;
2781 }
2782 }
2783
2784 pthread_mutex_unlock(&mMutex);
2785
2786 return 0;
2787 }
2788
2789 /*===========================================================================
2790 * FUNCTION : captureResultCb
2791 *
2792 * DESCRIPTION: Callback handler for all capture result
2793 * (streams, as well as metadata)
2794 *
2795 * PARAMETERS :
2796 * @metadata : metadata information
2797 * @buffer : actual gralloc buffer to be returned to frameworks.
2798 * NULL if metadata.
2799 *
2800 * RETURN : NONE
2801 *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata_buf,camera3_stream_buffer_t * buffer,uint32_t frame_number)2802 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
2803 camera3_stream_buffer_t *buffer, uint32_t frame_number)
2804 {
2805 pthread_mutex_lock(&mMutex);
2806
2807 /* Assume flush() is called before any reprocessing. Send
2808 * notify and result immediately upon receipt of any callback*/
2809 if (mLoopBackResult) {
2810 /* Send notify */
2811 camera3_notify_msg_t notify_msg;
2812 notify_msg.type = CAMERA3_MSG_SHUTTER;
2813 notify_msg.message.shutter.frame_number = mLoopBackResult->frame_number;
2814 notify_msg.message.shutter.timestamp = mLoopBackTimestamp;
2815 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
2816 /* Send capture result */
2817 mCallbackOps->process_capture_result(mCallbackOps, mLoopBackResult);
2818 free_camera_metadata((camera_metadata_t *)mLoopBackResult->result);
2819 free(mLoopBackResult);
2820 mLoopBackResult = NULL;
2821 }
2822
2823 if (metadata_buf)
2824 handleMetadataWithLock(metadata_buf);
2825 else
2826 handleBufferWithLock(buffer, frame_number);
2827 pthread_mutex_unlock(&mMutex);
2828 }
2829
2830 /*===========================================================================
2831 * FUNCTION : translateFromHalMetadata
2832 *
2833 * DESCRIPTION:
2834 *
2835 * PARAMETERS :
2836 * @metadata : metadata information from callback
2837 * @timestamp: metadata buffer timestamp
2838 * @request_id: request id
2839 * @jpegMetadata: additional jpeg metadata
2840 *
2841 * RETURN : camera_metadata_t*
2842 * metadata in a format specified by fwk
2843 *==========================================================================*/
2844 camera_metadata_t*
translateFromHalMetadata(metadata_buffer_t * metadata,nsecs_t timestamp,int32_t request_id,const CameraMetadata & jpegMetadata,uint8_t pipeline_depth,uint8_t capture_intent)2845 QCamera3HardwareInterface::translateFromHalMetadata(
2846 metadata_buffer_t *metadata,
2847 nsecs_t timestamp,
2848 int32_t request_id,
2849 const CameraMetadata& jpegMetadata,
2850 uint8_t pipeline_depth,
2851 uint8_t capture_intent)
2852 {
2853 CameraMetadata camMetadata;
2854 camera_metadata_t* resultMetadata;
2855
2856 if (jpegMetadata.entryCount())
2857 camMetadata.append(jpegMetadata);
2858
2859 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, ×tamp, 1);
2860 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
2861 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
2862 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
2863
2864 if (IS_META_AVAILABLE(CAM_INTF_META_FRAME_NUMBER, metadata)) {
2865 int64_t frame_number = *(uint32_t *) POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2866 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &frame_number, 1);
2867 }
2868
2869
2870 if (IS_META_AVAILABLE(CAM_INTF_PARM_FPS_RANGE, metadata)) {
2871 int32_t fps_range[2];
2872 cam_fps_range_t * float_range =
2873 (cam_fps_range_t *)POINTER_OF_PARAM(CAM_INTF_PARM_FPS_RANGE, metadata);
2874 fps_range[0] = (int32_t)float_range->min_fps;
2875 fps_range[1] = (int32_t)float_range->max_fps;
2876 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
2877 fps_range, 2);
2878 CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
2879 __func__, fps_range[0], fps_range[1]);
2880 }
2881
2882
2883 if (IS_META_AVAILABLE(CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata)) {
2884 int32_t *expCompensation =
2885 (int32_t *)POINTER_OF_META(CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata);
2886 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
2887 expCompensation, 1);
2888 }
2889
2890 if (IS_META_AVAILABLE(CAM_INTF_PARM_BESTSHOT_MODE, metadata)) {
2891 uint8_t sceneMode =
2892 *((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_BESTSHOT_MODE, metadata));
2893 uint8_t fwkSceneMode =
2894 (uint8_t)lookupFwkName(SCENE_MODES_MAP,
2895 sizeof(SCENE_MODES_MAP)/
2896 sizeof(SCENE_MODES_MAP[0]), sceneMode);
2897 camMetadata.update(ANDROID_CONTROL_SCENE_MODE,
2898 &fwkSceneMode, 1);
2899 }
2900
2901 if (IS_META_AVAILABLE(CAM_INTF_PARM_AEC_LOCK, metadata)) {
2902 uint8_t ae_lock =
2903 *((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_AEC_LOCK, metadata));
2904 camMetadata.update(ANDROID_CONTROL_AE_LOCK,
2905 &ae_lock, 1);
2906 }
2907
2908 if (IS_META_AVAILABLE(CAM_INTF_PARM_AWB_LOCK, metadata)) {
2909 uint8_t awb_lock =
2910 *((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_AWB_LOCK, metadata));
2911 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &awb_lock, 1);
2912 }
2913
2914 if (IS_META_AVAILABLE(CAM_INTF_META_FACE_DETECTION, metadata)){
2915 cam_face_detection_data_t *faceDetectionInfo =
2916 (cam_face_detection_data_t *)POINTER_OF_META(CAM_INTF_META_FACE_DETECTION, metadata);
2917 uint8_t numFaces = faceDetectionInfo->num_faces_detected;
2918 int32_t faceIds[MAX_ROI];
2919 uint8_t faceScores[MAX_ROI];
2920 int32_t faceRectangles[MAX_ROI * 4];
2921 int32_t faceLandmarks[MAX_ROI * 6];
2922 int j = 0, k = 0;
2923 for (int i = 0; i < numFaces; i++) {
2924 faceIds[i] = faceDetectionInfo->faces[i].face_id;
2925 faceScores[i] = faceDetectionInfo->faces[i].score;
2926 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
2927 faceRectangles+j, -1);
2928 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
2929 j+= 4;
2930 k+= 6;
2931 }
2932 if (numFaces <= 0) {
2933 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
2934 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
2935 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
2936 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
2937 }
2938 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
2939 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
2940 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
2941 faceRectangles, numFaces*4);
2942 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
2943 faceLandmarks, numFaces*6);
2944 }
2945 if (IS_META_AVAILABLE(CAM_INTF_META_COLOR_CORRECT_MODE, metadata)){
2946 uint8_t *color_correct_mode =
2947 (uint8_t *)POINTER_OF_META(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
2948 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
2949 }
2950 if (IS_META_AVAILABLE(CAM_INTF_META_EDGE_MODE, metadata)) {
2951 cam_edge_application_t *edgeApplication =
2952 (cam_edge_application_t *)POINTER_OF_META(CAM_INTF_META_EDGE_MODE, metadata);
2953 uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
2954 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
2955 camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
2956 }
2957 if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_POWER, metadata)) {
2958 uint8_t *flashPower =
2959 (uint8_t *)POINTER_OF_META(CAM_INTF_META_FLASH_POWER, metadata);
2960 camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
2961 }
2962 if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_FIRING_TIME, metadata)) {
2963 int64_t *flashFiringTime =
2964 (int64_t *)POINTER_OF_META(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
2965 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
2966 }
2967 if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_STATE, metadata)) {
2968 uint8_t flashState =
2969 *((uint8_t *)POINTER_OF_META(CAM_INTF_META_FLASH_STATE, metadata));
2970 if (!gCamCapability[mCameraId]->flash_available) {
2971 flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
2972 }
2973 camMetadata.update(ANDROID_FLASH_STATE, &flashState, 1);
2974 }
2975 if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_MODE, metadata)){
2976 uint8_t flashMode = *((uint8_t*)
2977 POINTER_OF_META(CAM_INTF_META_FLASH_MODE, metadata));
2978 uint8_t fwk_flashMode = lookupFwkName(FLASH_MODES_MAP,
2979 sizeof(FLASH_MODES_MAP), flashMode);
2980 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
2981 }
2982 if (IS_META_AVAILABLE(CAM_INTF_META_HOTPIXEL_MODE, metadata)) {
2983 uint8_t *hotPixelMode =
2984 (uint8_t *)POINTER_OF_META(CAM_INTF_META_HOTPIXEL_MODE, metadata);
2985 camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
2986 }
2987 if (IS_META_AVAILABLE(CAM_INTF_META_LENS_APERTURE, metadata)){
2988 float *lensAperture =
2989 (float *)POINTER_OF_META(CAM_INTF_META_LENS_APERTURE, metadata);
2990 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
2991 }
2992 if (IS_META_AVAILABLE(CAM_INTF_META_LENS_FILTERDENSITY, metadata)) {
2993 float *filterDensity =
2994 (float *)POINTER_OF_META(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
2995 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
2996 }
2997 if (IS_META_AVAILABLE(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata)){
2998 float *focalLength =
2999 (float *)POINTER_OF_META(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
3000 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
3001 }
3002
3003 if (IS_META_AVAILABLE(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata)) {
3004 uint8_t *opticalStab =
3005 (uint8_t *)POINTER_OF_META(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
3006 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
3007 }
3008 if (IS_META_AVAILABLE(CAM_INTF_PARM_DIS_ENABLE, metadata)) {
3009 uint8_t *vsMode =
3010 (uint8_t *)POINTER_OF_META(CAM_INTF_PARM_DIS_ENABLE, metadata);
3011 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, vsMode, 1);
3012 }
3013
3014 if (IS_META_AVAILABLE(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata)) {
3015 uint8_t *noiseRedMode =
3016 (uint8_t *)POINTER_OF_META(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
3017 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
3018 }
3019 if (IS_META_AVAILABLE(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata)) {
3020 uint8_t *noiseRedStrength =
3021 (uint8_t *)POINTER_OF_META(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
3022 camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
3023 }
3024 if (IS_META_AVAILABLE(CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata)) {
3025 float *effectiveExposureFactor =
3026 (float *)POINTER_OF_META(CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata);
3027 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
3028 }
3029 if (IS_META_AVAILABLE(CAM_INTF_META_SCALER_CROP_REGION, metadata)) {
3030 cam_crop_region_t *hScalerCropRegion =(cam_crop_region_t *)
3031 POINTER_OF_META(CAM_INTF_META_SCALER_CROP_REGION, metadata);
3032 int32_t scalerCropRegion[4];
3033 scalerCropRegion[0] = hScalerCropRegion->left;
3034 scalerCropRegion[1] = hScalerCropRegion->top;
3035 scalerCropRegion[2] = hScalerCropRegion->width;
3036 scalerCropRegion[3] = hScalerCropRegion->height;
3037 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
3038 }
3039 if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata)){
3040 int64_t *sensorExpTime =
3041 (int64_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
3042 CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
3043 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
3044 }
3045 if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata)){
3046 int64_t *sensorFameDuration =
3047 (int64_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
3048 CDBG("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
3049 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
3050 }
3051 if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata)){
3052 int64_t *sensorRollingShutterSkew =
3053 (int64_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW,
3054 metadata);
3055 CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
3056 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
3057 sensorRollingShutterSkew, 1);
3058 }
3059
3060 if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_SENSITIVITY, metadata)){
3061 int32_t sensorSensitivity =
3062 *((int32_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_SENSITIVITY, metadata));
3063 CDBG("%s: sensorSensitivity = %d", __func__, sensorSensitivity);
3064 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, &sensorSensitivity, 1);
3065
3066 //calculate the noise profile based on sensitivity
3067 double noise_profile_S = computeNoiseModelEntryS(sensorSensitivity);
3068 double noise_profile_O = computeNoiseModelEntryO(sensorSensitivity);
3069 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
3070 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i +=2) {
3071 noise_profile[i] = noise_profile_S;
3072 noise_profile[i+1] = noise_profile_O;
3073 }
3074 CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
3075 noise_profile_S, noise_profile_O);
3076 camMetadata.update( ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
3077 2 * gCamCapability[mCameraId]->num_color_channels);
3078 }
3079
3080
3081 if (IS_META_AVAILABLE(CAM_INTF_META_SHADING_MODE, metadata)) {
3082 uint8_t *shadingMode =
3083 (uint8_t *)POINTER_OF_META(CAM_INTF_META_SHADING_MODE, metadata);
3084 camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
3085 }
3086 if (IS_META_AVAILABLE(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata)) {
3087 uint8_t *faceDetectMode =
3088 (uint8_t *)POINTER_OF_META(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
3089 uint8_t fwk_faceDetectMode = (uint8_t)lookupFwkName(FACEDETECT_MODES_MAP,
3090 sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]), *faceDetectMode);
3091 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
3092 }
3093 if (IS_META_AVAILABLE(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata)) {
3094 uint8_t *histogramMode =
3095 (uint8_t *)POINTER_OF_META(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
3096 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
3097 }
3098 if (IS_META_AVAILABLE(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata)){
3099 uint8_t *sharpnessMapMode =
3100 (uint8_t *)POINTER_OF_META(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
3101 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
3102 sharpnessMapMode, 1);
3103 }
3104 if (IS_META_AVAILABLE(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata)){
3105 cam_sharpness_map_t *sharpnessMap = (cam_sharpness_map_t *)
3106 POINTER_OF_META(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
3107 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
3108 (int32_t*)sharpnessMap->sharpness,
3109 CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
3110 }
3111 if (IS_META_AVAILABLE(CAM_INTF_META_LENS_SHADING_MAP, metadata)) {
3112 cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
3113 POINTER_OF_META(CAM_INTF_META_LENS_SHADING_MAP, metadata);
3114 int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
3115 int map_width = gCamCapability[mCameraId]->lens_shading_map_size.width;
3116 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
3117 (float*)lensShadingMap->lens_shading,
3118 4*map_width*map_height);
3119 }
3120 if (IS_META_AVAILABLE(CAM_INTF_META_TONEMAP_MODE, metadata)) {
3121 uint8_t *toneMapMode =
3122 (uint8_t *)POINTER_OF_META(CAM_INTF_META_TONEMAP_MODE, metadata);
3123 camMetadata.update(ANDROID_TONEMAP_MODE, toneMapMode, 1);
3124 }
3125 if (IS_META_AVAILABLE(CAM_INTF_META_TONEMAP_CURVES, metadata)){
3126 //Populate CAM_INTF_META_TONEMAP_CURVES
3127 /* ch0 = G, ch 1 = B, ch 2 = R*/
3128 cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
3129 POINTER_OF_META(CAM_INTF_META_TONEMAP_CURVES, metadata);
3130 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
3131 ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
3132 __func__, tonemap->tonemap_points_cnt,
3133 CAM_MAX_TONEMAP_CURVE_SIZE);
3134 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
3135 }
3136
3137 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
3138 (float*)tonemap->curves[0].tonemap_points,
3139 tonemap->tonemap_points_cnt * 2);
3140
3141 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
3142 (float*)tonemap->curves[1].tonemap_points,
3143 tonemap->tonemap_points_cnt * 2);
3144
3145 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
3146 (float*)tonemap->curves[2].tonemap_points,
3147 tonemap->tonemap_points_cnt * 2);
3148 }
3149 if (IS_META_AVAILABLE(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata)){
3150 cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
3151 POINTER_OF_META(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
3152 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
3153 }
3154 if (IS_META_AVAILABLE(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata)){
3155 cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
3156 POINTER_OF_META(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
3157 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
3158 (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
3159 }
3160 if (IS_META_AVAILABLE(CAM_INTF_META_PROFILE_TONE_CURVE, metadata)) {
3161 cam_profile_tone_curve *toneCurve = (cam_profile_tone_curve *)
3162 POINTER_OF_META(CAM_INTF_META_PROFILE_TONE_CURVE, metadata);
3163 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
3164 ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
3165 __func__, toneCurve->tonemap_points_cnt,
3166 CAM_MAX_TONEMAP_CURVE_SIZE);
3167 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
3168 }
3169 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
3170 (float*)toneCurve->curve.tonemap_points,
3171 toneCurve->tonemap_points_cnt * 2);
3172 }
3173 if (IS_META_AVAILABLE(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata)){
3174 cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
3175 POINTER_OF_META(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
3176 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
3177 predColorCorrectionGains->gains, 4);
3178 }
3179 if (IS_META_AVAILABLE(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata)){
3180 cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
3181 POINTER_OF_META(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
3182 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
3183 (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
3184 }
3185 if (IS_META_AVAILABLE(CAM_INTF_META_OTP_WB_GRGB, metadata)) {
3186 float *otpWbGrGb = (float*) POINTER_OF_META(
3187 CAM_INTF_META_OTP_WB_GRGB, metadata);
3188 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
3189 }
3190 if (IS_META_AVAILABLE(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata)){
3191 uint8_t *blackLevelLock = (uint8_t*)
3192 POINTER_OF_META(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
3193 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
3194 }
3195 if (IS_META_AVAILABLE(CAM_INTF_META_SCENE_FLICKER, metadata)){
3196 uint8_t *sceneFlicker = (uint8_t*)
3197 POINTER_OF_META(CAM_INTF_META_SCENE_FLICKER, metadata);
3198 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
3199 }
3200 if (IS_META_AVAILABLE(CAM_INTF_PARM_EFFECT, metadata)) {
3201 uint8_t *effectMode = (uint8_t*)
3202 POINTER_OF_META(CAM_INTF_PARM_EFFECT, metadata);
3203 uint8_t fwk_effectMode = (uint8_t)lookupFwkName(EFFECT_MODES_MAP,
3204 sizeof(EFFECT_MODES_MAP),
3205 *effectMode);
3206 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
3207 }
3208 if (IS_META_AVAILABLE(CAM_INTF_META_TEST_PATTERN_DATA, metadata)) {
3209 cam_test_pattern_data_t *testPatternData = (cam_test_pattern_data_t *)
3210 POINTER_OF_META(CAM_INTF_META_TEST_PATTERN_DATA, metadata);
3211 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
3212 sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
3213 testPatternData->mode);
3214 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE,
3215 &fwk_testPatternMode, 1);
3216 int32_t fwk_testPatternData[4];
3217 fwk_testPatternData[0] = testPatternData->r;
3218 fwk_testPatternData[3] = testPatternData->b;
3219 switch (gCamCapability[mCameraId]->color_arrangement) {
3220 case CAM_FILTER_ARRANGEMENT_RGGB:
3221 case CAM_FILTER_ARRANGEMENT_GRBG:
3222 fwk_testPatternData[1] = testPatternData->gr;
3223 fwk_testPatternData[2] = testPatternData->gb;
3224 break;
3225 case CAM_FILTER_ARRANGEMENT_GBRG:
3226 case CAM_FILTER_ARRANGEMENT_BGGR:
3227 fwk_testPatternData[2] = testPatternData->gr;
3228 fwk_testPatternData[1] = testPatternData->gb;
3229 break;
3230 default:
3231 ALOGE("%s: color arrangement %d is not supported", __func__,
3232 gCamCapability[mCameraId]->color_arrangement);
3233 break;
3234 }
3235 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
3236 }
3237 if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_GPS_COORDINATES, metadata)) {
3238 double *gps_coords = (double *)POINTER_OF_META(
3239 CAM_INTF_META_JPEG_GPS_COORDINATES, metadata);
3240 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
3241 }
3242 if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata)) {
3243 char *gps_methods = (char *)POINTER_OF_META(
3244 CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata);
3245 String8 str(gps_methods);
3246 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
3247 }
3248 if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata)) {
3249 int64_t *gps_timestamp = (int64_t *)POINTER_OF_META(
3250 CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata);
3251 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
3252 }
3253 if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
3254 int32_t *jpeg_orientation = (int32_t *)POINTER_OF_META(
3255 CAM_INTF_META_JPEG_ORIENTATION, metadata);
3256 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
3257 }
3258 if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_QUALITY, metadata)) {
3259 uint8_t *jpeg_quality = (uint8_t *)POINTER_OF_META(
3260 CAM_INTF_META_JPEG_QUALITY, metadata);
3261 camMetadata.update(ANDROID_JPEG_QUALITY, jpeg_quality, 1);
3262 }
3263 if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_THUMB_QUALITY, metadata)) {
3264 uint8_t *thumb_quality = (uint8_t *)POINTER_OF_META(
3265 CAM_INTF_META_JPEG_THUMB_QUALITY, metadata);
3266 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, thumb_quality, 1);
3267 }
3268 if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_THUMB_SIZE, metadata)) {
3269 cam_dimension_t *thumb_size = (cam_dimension_t *)POINTER_OF_META(
3270 CAM_INTF_META_JPEG_THUMB_SIZE, metadata);
3271 // Note: cam_dimension_t should have the right layout, but for safety just copy it.
3272 int32_t thumbnail_size[2];
3273 thumbnail_size[0] = thumb_size->width;
3274 thumbnail_size[1] = thumb_size->height;
3275 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size, 2);
3276 }
3277 if (IS_META_AVAILABLE(CAM_INTF_META_PRIVATE_DATA, metadata)) {
3278 int32_t *privateData = (int32_t *)
3279 POINTER_OF_META(CAM_INTF_META_PRIVATE_DATA, metadata);
3280 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
3281 privateData, MAX_METADATA_PRIVATE_PAYLOAD_SIZE);
3282 }
3283 if (metadata->is_tuning_params_valid) {
3284 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
3285 uint8_t *data = (uint8_t*)&tuning_meta_data_blob[0];
3286 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
3287
3288
3289 memcpy(data, ((uint8_t*)&metadata->tuning_params.tuning_data_version),
3290 sizeof(uint32_t));
3291 data += sizeof(uint32_t);
3292
3293 memcpy(data, ((uint8_t*)&metadata->tuning_params.tuning_sensor_data_size),
3294 sizeof(uint32_t));
3295 CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
3296 data += sizeof(uint32_t);
3297
3298 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
3299 sizeof(uint32_t));
3300 CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
3301 data += sizeof(uint32_t);
3302
3303 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
3304 sizeof(uint32_t));
3305 CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
3306 data += sizeof(uint32_t);
3307
3308 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
3309 sizeof(uint32_t));
3310 CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
3311 data += sizeof(uint32_t);
3312
3313 metadata->tuning_params.tuning_mod3_data_size = 0;
3314 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
3315 sizeof(uint32_t));
3316 CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
3317 data += sizeof(uint32_t);
3318
3319 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
3320 metadata->tuning_params.tuning_sensor_data_size);
3321 data += metadata->tuning_params.tuning_sensor_data_size;
3322
3323 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
3324 metadata->tuning_params.tuning_vfe_data_size);
3325 data += metadata->tuning_params.tuning_vfe_data_size;
3326
3327 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
3328 metadata->tuning_params.tuning_cpp_data_size);
3329 data += metadata->tuning_params.tuning_cpp_data_size;
3330
3331
3332 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
3333 metadata->tuning_params.tuning_cac_data_size);
3334 data += metadata->tuning_params.tuning_cac_data_size;
3335
3336 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
3337 (int32_t*)tuning_meta_data_blob, (data-tuning_meta_data_blob)/sizeof(uint32_t));
3338 }
3339 if (IS_META_AVAILABLE(CAM_INTF_META_NEUTRAL_COL_POINT, metadata)) {
3340 cam_neutral_col_point_t *neuColPoint = (cam_neutral_col_point_t*)
3341 POINTER_OF_META(CAM_INTF_META_NEUTRAL_COL_POINT, metadata);
3342 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
3343 (camera_metadata_rational_t*)neuColPoint->neutral_col_point, 3);
3344 }
3345
3346 if (IS_META_AVAILABLE(CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata)) {
3347 uint8_t shadingMapMode =
3348 *((uint32_t *)POINTER_OF_META(CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata));
3349 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingMapMode, 1);
3350 }
3351
3352 if (IS_META_AVAILABLE(CAM_INTF_META_AEC_ROI, metadata)) {
3353 cam_area_t *hAeRegions =
3354 (cam_area_t *)POINTER_OF_META(CAM_INTF_META_AEC_ROI, metadata);
3355 int32_t aeRegions[5];
3356 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
3357 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
3358 CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
3359 __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
3360 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
3361 hAeRegions->rect.height);
3362 }
3363
3364 if (IS_META_AVAILABLE(CAM_INTF_META_AF_ROI, metadata)) {
3365 /*af regions*/
3366 cam_area_t *hAfRegions =
3367 (cam_area_t *)POINTER_OF_META(CAM_INTF_META_AF_ROI, metadata);
3368 int32_t afRegions[5];
3369 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
3370 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
3371 CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
3372 __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
3373 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
3374 hAfRegions->rect.height);
3375 }
3376
3377 if (IS_META_AVAILABLE(CAM_INTF_PARM_ANTIBANDING, metadata)) {
3378 uint8_t hal_ab_mode =
3379 *((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_ANTIBANDING, metadata));
3380 uint8_t fwk_ab_mode = (uint8_t)lookupFwkName(ANTIBANDING_MODES_MAP,
3381 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
3382 hal_ab_mode);
3383 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
3384 &fwk_ab_mode, 1);
3385 }
3386
3387 if (IS_META_AVAILABLE(CAM_INTF_META_MODE, metadata)) {
3388 uint8_t mode =
3389 *((uint32_t *)POINTER_OF_META(CAM_INTF_META_MODE, metadata));
3390 camMetadata.update(ANDROID_CONTROL_MODE, &mode, 1);
3391 }
3392
3393 /* Constant metadata values to be update*/
3394 uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
3395 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
3396
3397 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
3398 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
3399
3400 int32_t hotPixelMap[2];
3401 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
3402
3403 uint8_t vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
3404 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
3405
3406 // CDS
3407 if (IS_META_AVAILABLE(CAM_INTF_PARM_CDS_MODE, metadata)) {
3408 cam_cds_mode_type_t *cds = (cam_cds_mode_type_t *)
3409 POINTER_OF_META(CAM_INTF_PARM_CDS_MODE, metadata);
3410 int32_t mode = *cds;
3411 camMetadata.update(QCAMERA3_CDS_MODE,
3412 &mode, 1);
3413 }
3414
3415 // Reprocess crop data
3416 if (IS_META_AVAILABLE(CAM_INTF_META_CROP_DATA, metadata)) {
3417 cam_crop_data_t *crop_data = (cam_crop_data_t *)
3418 POINTER_OF_PARAM(CAM_INTF_META_CROP_DATA, metadata);
3419 uint8_t cnt = crop_data->num_of_streams;
3420 if ((0 < cnt) && (cnt < MAX_NUM_STREAMS)) {
3421 int rc = NO_ERROR;
3422 int32_t *crop = new int32_t[cnt*4];
3423 if (NULL == crop) {
3424 rc = NO_MEMORY;
3425 }
3426
3427 int32_t *crop_stream_ids = new int32_t[cnt];
3428 if (NULL == crop_stream_ids) {
3429 rc = NO_MEMORY;
3430 }
3431
3432 if (NO_ERROR == rc) {
3433 int32_t steams_found = 0;
3434 for (size_t i = 0; i < cnt; i++) {
3435 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3436 it != mStreamInfo.end(); it++) {
3437 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3438 if (NULL != channel) {
3439 if (crop_data->crop_info[i].stream_id ==
3440 channel->mStreams[0]->getMyServerID()) {
3441 crop[steams_found*4] = crop_data->crop_info[i].crop.left;
3442 crop[steams_found*4 + 1] = crop_data->crop_info[i].crop.top;
3443 crop[steams_found*4 + 2] = crop_data->crop_info[i].crop.width;
3444 crop[steams_found*4 + 3] = crop_data->crop_info[i].crop.height;
3445 // In a more general case we may want to generate
3446 // unique id depending on width, height, stream, private
3447 // data etc.
3448 crop_stream_ids[steams_found] = (int32_t)(*it)->stream;
3449 steams_found++;
3450 CDBG("%s: Adding reprocess crop data for stream %p %dx%d, %dx%d",
3451 __func__,
3452 (*it)->stream,
3453 crop_data->crop_info[i].crop.left,
3454 crop_data->crop_info[i].crop.top,
3455 crop_data->crop_info[i].crop.width,
3456 crop_data->crop_info[i].crop.height);
3457 break;
3458 }
3459 }
3460 }
3461 }
3462
3463 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
3464 &steams_found, 1);
3465 camMetadata.update(QCAMERA3_CROP_REPROCESS,
3466 crop, steams_found*4);
3467 camMetadata.update(QCAMERA3_CROP_STREAM_ID_REPROCESS,
3468 crop_stream_ids, steams_found);
3469 }
3470
3471 if (crop) {
3472 delete [] crop;
3473 }
3474 if (crop_stream_ids) {
3475 delete [] crop_stream_ids;
3476 }
3477 } else {
3478 // mm-qcamera-daemon only posts crop_data for streams
3479 // not linked to pproc. So no valid crop metadata is not
3480 // necessarily an error case.
3481 CDBG("%s: No valid crop metadata entries", __func__);
3482 }
3483 }
3484
3485 if (IS_PARAM_AVAILABLE(CAM_INTF_PARM_CAC, metadata)) {
3486 cam_aberration_mode_t *cacMode = (cam_aberration_mode_t *)
3487 POINTER_OF_PARAM(CAM_INTF_PARM_CAC, metadata);
3488 int32_t cac = lookupFwkName(COLOR_ABERRATION_MAP,
3489 sizeof(COLOR_ABERRATION_MAP)/sizeof(COLOR_ABERRATION_MAP[0]),
3490 *cacMode);
3491 if (NAME_NOT_FOUND != cac) {
3492 uint8_t val = (uint8_t) cac;
3493 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
3494 &val,
3495 1);
3496 } else {
3497 ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode);
3498 }
3499 }
3500
3501 resultMetadata = camMetadata.release();
3502 return resultMetadata;
3503 }
3504
3505 /*===========================================================================
3506 * FUNCTION : translateCbUrgentMetadataToResultMetadata
3507 *
3508 * DESCRIPTION:
3509 *
3510 * PARAMETERS :
3511 * @metadata : metadata information from callback
3512 *
3513 * RETURN : camera_metadata_t*
3514 * metadata in a format specified by fwk
3515 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    // Translates the "urgent" (partial/early) HAL metadata into a framework
    // camera_metadata_t. Only the 3A states/triggers and related controls are
    // populated here; the full result is produced elsewhere.
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;
    // Defaults for the AE-mode deduction at the bottom; overwritten only if
    // the corresponding entries are present in this buffer.
    uint8_t aeMode = CAM_AE_MODE_MAX;
    int32_t *flashMode = NULL;
    int32_t *redeye = NULL;

    // AE state machine value reported by the 3A core.
    if (IS_META_AVAILABLE(CAM_INTF_META_AEC_STATE, metadata)) {
        uint8_t *ae_state = (uint8_t *)
            POINTER_OF_META(CAM_INTF_META_AEC_STATE, metadata);
        camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE", __func__);
    }

    // AF state machine value.
    if (IS_META_AVAILABLE(CAM_INTF_META_AF_STATE, metadata)) {
        uint8_t *afState = (uint8_t *)
            POINTER_OF_META(CAM_INTF_META_AF_STATE, metadata);
        camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE %d", __func__, *afState);
    }

    // Current lens focus distance (single float).
    if (IS_META_AVAILABLE(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata)) {
        float *focusDistance =
            (float *)POINTER_OF_META(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
    }

    // Focus range is a pair of floats (near, far).
    if (IS_META_AVAILABLE(CAM_INTF_META_LENS_FOCUS_RANGE, metadata)) {
        float *focusRange =
            (float *)POINTER_OF_META(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
    }

    // AWB state machine value.
    if (IS_META_AVAILABLE(CAM_INTF_META_AWB_STATE, metadata)) {
        uint8_t *whiteBalanceState = (uint8_t *)
            POINTER_OF_META(CAM_INTF_META_AWB_STATE, metadata);
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE", __func__);
    }

    // AE precapture trigger echoed back with its id.
    if (IS_META_AVAILABLE(CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata)) {
        cam_trigger_t *aecTrigger =
            (cam_trigger_t *)POINTER_OF_META(CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
            &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
            &aecTrigger->trigger_id, 1);
    }

    // HAL focus mode mapped back to the framework AF mode enum.
    if (IS_META_AVAILABLE(CAM_INTF_PARM_FOCUS_MODE, metadata)) {
        uint8_t *focusMode = (uint8_t *)
            POINTER_OF_META(CAM_INTF_PARM_FOCUS_MODE, metadata);
        uint8_t fwkAfMode = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
            sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
        camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
    }

    // AF trigger echoed back with its id.
    if (IS_META_AVAILABLE(CAM_INTF_META_AF_TRIGGER, metadata)) {
        cam_trigger_t *af_trigger =
            (cam_trigger_t *)POINTER_OF_META(CAM_INTF_META_AF_TRIGGER, metadata);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
            &af_trigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
    }

    // HAL white balance mode mapped back to the framework AWB mode enum.
    if (IS_META_AVAILABLE(CAM_INTF_PARM_WHITE_BALANCE, metadata)) {
        uint8_t *whiteBalance = (uint8_t *)
            POINTER_OF_META(CAM_INTF_PARM_WHITE_BALANCE, metadata);
        uint8_t fwkWhiteBalanceMode =
            (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
            sizeof(WHITE_BALANCE_MODES_MAP)/
            sizeof(WHITE_BALANCE_MODES_MAP[0]), *whiteBalance);
        camMetadata.update(ANDROID_CONTROL_AWB_MODE,
            &fwkWhiteBalanceMode, 1);
    }

    // Gather the three inputs for the AE mode deduction below.
    if (IS_META_AVAILABLE(CAM_INTF_META_AEC_MODE, metadata)) {
        aeMode = *((uint32_t*) POINTER_OF_META(CAM_INTF_META_AEC_MODE, metadata));
    }
    if (IS_META_AVAILABLE(CAM_INTF_PARM_LED_MODE, metadata)) {
        flashMode = (int32_t*)
            POINTER_OF_PARAM(CAM_INTF_PARM_LED_MODE, metadata);
    }
    if (IS_META_AVAILABLE(CAM_INTF_PARM_REDEYE_REDUCTION, metadata)) {
        redeye = (int32_t*)
            POINTER_OF_PARAM(CAM_INTF_PARM_REDEYE_REDUCTION, metadata);
    }

    // ANDROID_CONTROL_AE_MODE is deduced in priority order:
    // red-eye reduction enabled > flash AUTO/ON > AE on > AE off.
    // If none of the inputs were present, no AE mode is reported.
    uint8_t fwk_aeMode;
    if (redeye != NULL && *redeye == 1) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (flashMode != NULL &&
            ((*flashMode == CAM_FLASH_MODE_AUTO)||
             (*flashMode == CAM_FLASH_MODE_ON))) {
        fwk_aeMode = (uint8_t)lookupFwkName(AE_FLASH_MODE_MAP,
                sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),*flashMode);
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%p, flashMode:%p, aeMode:%d!!!",__func__,
            redeye, flashMode, aeMode);
    }

    // Lens moving/stationary state.
    if (IS_META_AVAILABLE(CAM_INTF_META_LENS_STATE, metadata)) {
        uint8_t *lensState = (uint8_t *)POINTER_OF_META(CAM_INTF_META_LENS_STATE, metadata);
        camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
    }

    // Ownership of the packed buffer transfers to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
3636
3637 /*===========================================================================
3638 * FUNCTION : dumpMetadataToFile
3639 *
3640 * DESCRIPTION: Dumps tuning metadata to file system
3641 *
3642 * PARAMETERS :
3643 * @meta : tuning metadata
3644 * @dumpFrameCount : current dump frame count
3645 * @enabled : Enable mask
3646 *
3647 *==========================================================================*/
dumpMetadataToFile(tuning_params_t & meta,uint32_t & dumpFrameCount,int32_t enabled,const char * type,uint32_t frameNumber)3648 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
3649 uint32_t &dumpFrameCount,
3650 int32_t enabled,
3651 const char *type,
3652 uint32_t frameNumber)
3653 {
3654 //Some sanity checks
3655 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
3656 ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
3657 __func__,
3658 meta.tuning_sensor_data_size,
3659 TUNING_SENSOR_DATA_MAX);
3660 return;
3661 }
3662
3663 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
3664 ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
3665 __func__,
3666 meta.tuning_vfe_data_size,
3667 TUNING_VFE_DATA_MAX);
3668 return;
3669 }
3670
3671 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
3672 ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
3673 __func__,
3674 meta.tuning_cpp_data_size,
3675 TUNING_CPP_DATA_MAX);
3676 return;
3677 }
3678
3679 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
3680 ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
3681 __func__,
3682 meta.tuning_cac_data_size,
3683 TUNING_CAC_DATA_MAX);
3684 return;
3685 }
3686 //
3687
3688 if(enabled){
3689 char timeBuf[FILENAME_MAX];
3690 char buf[FILENAME_MAX];
3691 memset(buf, 0, sizeof(buf));
3692 memset(timeBuf, 0, sizeof(timeBuf));
3693 time_t current_time;
3694 struct tm * timeinfo;
3695 time (¤t_time);
3696 timeinfo = localtime (¤t_time);
3697 strftime (timeBuf, sizeof(timeBuf),"/data/%Y%m%d%H%M%S", timeinfo);
3698 String8 filePath(timeBuf);
3699 snprintf(buf,
3700 sizeof(buf),
3701 "%dm_%s_%d.bin",
3702 dumpFrameCount,
3703 type,
3704 frameNumber);
3705 filePath.append(buf);
3706 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
3707 if (file_fd >= 0) {
3708 int written_len = 0;
3709 meta.tuning_data_version = TUNING_DATA_VERSION;
3710 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
3711 written_len += write(file_fd, data, sizeof(uint32_t));
3712 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
3713 CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
3714 written_len += write(file_fd, data, sizeof(uint32_t));
3715 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
3716 CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
3717 written_len += write(file_fd, data, sizeof(uint32_t));
3718 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
3719 CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
3720 written_len += write(file_fd, data, sizeof(uint32_t));
3721 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
3722 CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
3723 written_len += write(file_fd, data, sizeof(uint32_t));
3724 meta.tuning_mod3_data_size = 0;
3725 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
3726 CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
3727 written_len += write(file_fd, data, sizeof(uint32_t));
3728 int total_size = meta.tuning_sensor_data_size;
3729 data = (void *)((uint8_t *)&meta.data);
3730 written_len += write(file_fd, data, total_size);
3731 total_size = meta.tuning_vfe_data_size;
3732 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
3733 written_len += write(file_fd, data, total_size);
3734 total_size = meta.tuning_cpp_data_size;
3735 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
3736 written_len += write(file_fd, data, total_size);
3737 total_size = meta.tuning_cac_data_size;
3738 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
3739 written_len += write(file_fd, data, total_size);
3740 close(file_fd);
3741 }else {
3742 ALOGE("%s: fail to open file for metadata dumping", __func__);
3743 }
3744 }
3745 }
3746
3747 /*===========================================================================
3748 * FUNCTION : cleanAndSortStreamInfo
3749 *
3750 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
3751 * and sort them such that raw stream is at the end of the list
3752 * This is a workaround for camera daemon constraint.
3753 *
3754 * PARAMETERS : None
3755 *
3756 *==========================================================================*/
cleanAndSortStreamInfo()3757 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
3758 {
3759 List<stream_info_t *> newStreamInfo;
3760
3761 /*clean up invalid streams*/
3762 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
3763 it != mStreamInfo.end();) {
3764 if(((*it)->status) == INVALID){
3765 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
3766 delete channel;
3767 free(*it);
3768 it = mStreamInfo.erase(it);
3769 } else {
3770 it++;
3771 }
3772 }
3773
3774 // Move preview/video/callback/snapshot streams into newList
3775 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3776 it != mStreamInfo.end();) {
3777 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
3778 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
3779 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
3780 newStreamInfo.push_back(*it);
3781 it = mStreamInfo.erase(it);
3782 } else
3783 it++;
3784 }
3785 // Move raw streams into newList
3786 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3787 it != mStreamInfo.end();) {
3788 newStreamInfo.push_back(*it);
3789 it = mStreamInfo.erase(it);
3790 }
3791
3792 mStreamInfo = newStreamInfo;
3793 }
3794
3795 /*===========================================================================
3796 * FUNCTION : extractJpegMetadata
3797 *
3798 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
3799 * JPEG metadata is cached in HAL, and return as part of capture
3800 * result when metadata is returned from camera daemon.
3801 *
3802 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
3803 * @request: capture request
3804 *
3805 *==========================================================================*/
extractJpegMetadata(CameraMetadata & jpegMetadata,const camera3_capture_request_t * request)3806 void QCamera3HardwareInterface::extractJpegMetadata(
3807 CameraMetadata& jpegMetadata,
3808 const camera3_capture_request_t *request)
3809 {
3810 CameraMetadata frame_settings;
3811 frame_settings = request->settings;
3812
3813 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
3814 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
3815 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
3816 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
3817
3818 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
3819 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
3820 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
3821 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
3822
3823 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
3824 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
3825 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
3826 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
3827
3828 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
3829 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
3830 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
3831 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
3832
3833 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
3834 jpegMetadata.update(ANDROID_JPEG_QUALITY,
3835 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
3836 frame_settings.find(ANDROID_JPEG_QUALITY).count);
3837
3838 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
3839 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
3840 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
3841 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
3842
3843 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
3844 int32_t thumbnail_size[2];
3845 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
3846 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
3847 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
3848 int32_t orientation =
3849 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
3850 if ((orientation == 90) || (orientation == 270)) {
3851 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
3852 int32_t temp;
3853 temp = thumbnail_size[0];
3854 thumbnail_size[0] = thumbnail_size[1];
3855 thumbnail_size[1] = temp;
3856 }
3857 }
3858 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
3859 thumbnail_size,
3860 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
3861 }
3862 }
3863
3864 /*===========================================================================
3865 * FUNCTION : convertToRegions
3866 *
3867 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
3868 *
3869 * PARAMETERS :
3870 * @rect : cam_rect_t struct to convert
3871 * @region : int32_t destination array
3872 * @weight : if we are converting from cam_area_t, weight is valid
3873 * else weight = -1
3874 *
3875 *==========================================================================*/
convertToRegions(cam_rect_t rect,int32_t * region,int weight)3876 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
3877 region[0] = rect.left;
3878 region[1] = rect.top;
3879 region[2] = rect.left + rect.width;
3880 region[3] = rect.top + rect.height;
3881 if (weight > -1) {
3882 region[4] = weight;
3883 }
3884 }
3885
3886 /*===========================================================================
3887 * FUNCTION : convertFromRegions
3888 *
3889 * DESCRIPTION: helper method to convert from array to cam_rect_t
3890 *
 * PARAMETERS :
 *   @roi      : cam_area_t destination filled from the metadata entry
 *   @settings : capture request settings containing the region tag
 *   @tag      : metadata tag whose entry holds
 *               [x_min, y_min, x_max, y_max, weight]
3896 *
3897 *==========================================================================*/
convertFromRegions(cam_area_t * roi,const camera_metadata_t * settings,uint32_t tag)3898 void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
3899 const camera_metadata_t *settings,
3900 uint32_t tag){
3901 CameraMetadata frame_settings;
3902 frame_settings = settings;
3903 int32_t x_min = frame_settings.find(tag).data.i32[0];
3904 int32_t y_min = frame_settings.find(tag).data.i32[1];
3905 int32_t x_max = frame_settings.find(tag).data.i32[2];
3906 int32_t y_max = frame_settings.find(tag).data.i32[3];
3907 roi->weight = frame_settings.find(tag).data.i32[4];
3908 roi->rect.left = x_min;
3909 roi->rect.top = y_min;
3910 roi->rect.width = x_max - x_min;
3911 roi->rect.height = y_max - y_min;
3912 }
3913
3914 /*===========================================================================
3915 * FUNCTION : resetIfNeededROI
3916 *
3917 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
3918 * crop region
3919 *
3920 * PARAMETERS :
3921 * @roi : cam_area_t struct to resize
3922 * @scalerCropRegion : cam_crop_region_t region to compare against
3923 *
3924 *
3925 *==========================================================================*/
resetIfNeededROI(cam_area_t * roi,const cam_crop_region_t * scalerCropRegion)3926 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
3927 const cam_crop_region_t* scalerCropRegion)
3928 {
3929 int32_t roi_x_max = roi->rect.width + roi->rect.left;
3930 int32_t roi_y_max = roi->rect.height + roi->rect.top;
3931 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
3932 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
3933
3934 /* According to spec weight = 0 is used to indicate roi needs to be disabled
3935 * without having this check the calculations below to validate if the roi
3936 * is inside scalar crop region will fail resulting in the roi not being
3937 * reset causing algorithm to continue to use stale roi window
3938 */
3939 if (roi->weight == 0) {
3940 return true;
3941 }
3942
3943 if ((roi_x_max < scalerCropRegion->left) ||
3944 // right edge of roi window is left of scalar crop's left edge
3945 (roi_y_max < scalerCropRegion->top) ||
3946 // bottom edge of roi window is above scalar crop's top edge
3947 (roi->rect.left > crop_x_max) ||
3948 // left edge of roi window is beyond(right) of scalar crop's right edge
3949 (roi->rect.top > crop_y_max)){
3950 // top edge of roi windo is above scalar crop's top edge
3951 return false;
3952 }
3953 if (roi->rect.left < scalerCropRegion->left) {
3954 roi->rect.left = scalerCropRegion->left;
3955 }
3956 if (roi->rect.top < scalerCropRegion->top) {
3957 roi->rect.top = scalerCropRegion->top;
3958 }
3959 if (roi_x_max > crop_x_max) {
3960 roi_x_max = crop_x_max;
3961 }
3962 if (roi_y_max > crop_y_max) {
3963 roi_y_max = crop_y_max;
3964 }
3965 roi->rect.width = roi_x_max - roi->rect.left;
3966 roi->rect.height = roi_y_max - roi->rect.top;
3967 return true;
3968 }
3969
3970 /*===========================================================================
3971 * FUNCTION : convertLandmarks
3972 *
3973 * DESCRIPTION: helper method to extract the landmarks from face detection info
3974 *
3975 * PARAMETERS :
3976 * @face : cam_rect_t struct to convert
3977 * @landmarks : int32_t destination array
3978 *
3979 *
3980 *==========================================================================*/
convertLandmarks(cam_face_detection_info_t face,int32_t * landmarks)3981 void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
3982 {
3983 landmarks[0] = face.left_eye_center.x;
3984 landmarks[1] = face.left_eye_center.y;
3985 landmarks[2] = face.right_eye_center.x;
3986 landmarks[3] = face.right_eye_center.y;
3987 landmarks[4] = face.mouth_center.x;
3988 landmarks[5] = face.mouth_center.y;
3989 }
3990
3991 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
3992 /*===========================================================================
3993 * FUNCTION : initCapabilities
3994 *
3995 * DESCRIPTION: initialize camera capabilities in static data struct
3996 *
3997 * PARAMETERS :
3998 * @cameraId : camera Id
3999 *
4000 * RETURN : int32_t type of status
4001 * NO_ERROR -- success
4002 * none-zero failure code
4003 *==========================================================================*/
initCapabilities(int cameraId)4004 int QCamera3HardwareInterface::initCapabilities(int cameraId)
4005 {
4006 int rc = 0;
4007 mm_camera_vtbl_t *cameraHandle = NULL;
4008 QCamera3HeapMemory *capabilityHeap = NULL;
4009
4010 cameraHandle = camera_open(cameraId);
4011 if (!cameraHandle) {
4012 ALOGE("%s: camera_open failed", __func__);
4013 rc = -1;
4014 goto open_failed;
4015 }
4016
4017 capabilityHeap = new QCamera3HeapMemory();
4018 if (capabilityHeap == NULL) {
4019 ALOGE("%s: creation of capabilityHeap failed", __func__);
4020 goto heap_creation_failed;
4021 }
4022 /* Allocate memory for capability buffer */
4023 rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
4024 if(rc != OK) {
4025 ALOGE("%s: No memory for cappability", __func__);
4026 goto allocate_failed;
4027 }
4028
4029 /* Map memory for capability buffer */
4030 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
4031 rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
4032 CAM_MAPPING_BUF_TYPE_CAPABILITY,
4033 capabilityHeap->getFd(0),
4034 sizeof(cam_capability_t));
4035 if(rc < 0) {
4036 ALOGE("%s: failed to map capability buffer", __func__);
4037 goto map_failed;
4038 }
4039
4040 /* Query Capability */
4041 rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
4042 if(rc < 0) {
4043 ALOGE("%s: failed to query capability",__func__);
4044 goto query_failed;
4045 }
4046 gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
4047 if (!gCamCapability[cameraId]) {
4048 ALOGE("%s: out of memory", __func__);
4049 goto query_failed;
4050 }
4051 memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
4052 sizeof(cam_capability_t));
4053 rc = 0;
4054
4055 query_failed:
4056 cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
4057 CAM_MAPPING_BUF_TYPE_CAPABILITY);
4058 map_failed:
4059 capabilityHeap->deallocate();
4060 allocate_failed:
4061 delete capabilityHeap;
4062 heap_creation_failed:
4063 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
4064 cameraHandle = NULL;
4065 open_failed:
4066 return rc;
4067 }
4068
4069 /*===========================================================================
4070 * FUNCTION : initParameters
4071 *
4072 * DESCRIPTION: initialize camera parameters
4073 *
4074 * PARAMETERS :
4075 *
4076 * RETURN : int32_t type of status
4077 * NO_ERROR -- success
4078 * none-zero failure code
4079 *==========================================================================*/
initParameters()4080 int QCamera3HardwareInterface::initParameters()
4081 {
4082 int rc = 0;
4083
4084 //Allocate Set Param Buffer
4085 mParamHeap = new QCamera3HeapMemory();
4086 rc = mParamHeap->allocate(1, sizeof(metadata_buffer_t), false);
4087 if(rc != OK) {
4088 rc = NO_MEMORY;
4089 ALOGE("Failed to allocate SETPARM Heap memory");
4090 delete mParamHeap;
4091 mParamHeap = NULL;
4092 return rc;
4093 }
4094
4095 //Map memory for parameters buffer
4096 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
4097 CAM_MAPPING_BUF_TYPE_PARM_BUF,
4098 mParamHeap->getFd(0),
4099 sizeof(metadata_buffer_t));
4100 if(rc < 0) {
4101 ALOGE("%s:failed to map SETPARM buffer",__func__);
4102 rc = FAILED_TRANSACTION;
4103 mParamHeap->deallocate();
4104 delete mParamHeap;
4105 mParamHeap = NULL;
4106 return rc;
4107 }
4108
4109 mParameters = (metadata_buffer_t*) DATA_PTR(mParamHeap,0);
4110 return rc;
4111 }
4112
4113 /*===========================================================================
4114 * FUNCTION : deinitParameters
4115 *
4116 * DESCRIPTION: de-initialize camera parameters
4117 *
4118 * PARAMETERS :
4119 *
4120 * RETURN : NONE
4121 *==========================================================================*/
deinitParameters()4122 void QCamera3HardwareInterface::deinitParameters()
4123 {
4124 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
4125 CAM_MAPPING_BUF_TYPE_PARM_BUF);
4126
4127 mParamHeap->deallocate();
4128 delete mParamHeap;
4129 mParamHeap = NULL;
4130
4131 mParameters = NULL;
4132 }
4133
4134 /*===========================================================================
4135 * FUNCTION : calcMaxJpegSize
4136 *
4137 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
4138 *
4139 * PARAMETERS :
4140 *
4141 * RETURN : max_jpeg_size
4142 *==========================================================================*/
calcMaxJpegSize(uint8_t camera_id)4143 int QCamera3HardwareInterface::calcMaxJpegSize(uint8_t camera_id)
4144 {
4145 int32_t max_jpeg_size = 0;
4146 int temp_width, temp_height;
4147 for (int i = 0; i < gCamCapability[camera_id]->picture_sizes_tbl_cnt; i++) {
4148 temp_width = gCamCapability[camera_id]->picture_sizes_tbl[i].width;
4149 temp_height = gCamCapability[camera_id]->picture_sizes_tbl[i].height;
4150 if (temp_width * temp_height > max_jpeg_size ) {
4151 max_jpeg_size = temp_width * temp_height;
4152 }
4153 }
4154 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
4155 return max_jpeg_size;
4156 }
4157
4158 /*===========================================================================
4159 * FUNCTION : getMaxRawSize
4160 *
4161 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
4162 *
4163 * PARAMETERS :
4164 *
4165 * RETURN : Largest supported Raw Dimension
4166 *==========================================================================*/
getMaxRawSize(uint8_t camera_id)4167 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint8_t camera_id)
4168 {
4169 int max_width = 0;
4170 cam_dimension_t maxRawSize;
4171
4172 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
4173 for (int i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
4174 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
4175 max_width = gCamCapability[camera_id]->raw_dim[i].width;
4176 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
4177 }
4178 }
4179 return maxRawSize;
4180 }
4181
4182
4183 /*===========================================================================
4184 * FUNCTION : calcMaxJpegDim
4185 *
4186 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
4187 *
4188 * PARAMETERS :
4189 *
4190 * RETURN : max_jpeg_dim
4191 *==========================================================================*/
calcMaxJpegDim()4192 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
4193 {
4194 cam_dimension_t max_jpeg_dim;
4195 cam_dimension_t curr_jpeg_dim;
4196 max_jpeg_dim.width = 0;
4197 max_jpeg_dim.height = 0;
4198 curr_jpeg_dim.width = 0;
4199 curr_jpeg_dim.height = 0;
4200 for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
4201 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
4202 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
4203 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
4204 max_jpeg_dim.width * max_jpeg_dim.height ) {
4205 max_jpeg_dim.width = curr_jpeg_dim.width;
4206 max_jpeg_dim.height = curr_jpeg_dim.height;
4207 }
4208 }
4209 return max_jpeg_dim;
4210 }
4211
4212
4213 /*===========================================================================
4214 * FUNCTION : initStaticMetadata
4215 *
4216 * DESCRIPTION: initialize the static metadata
4217 *
4218 * PARAMETERS :
4219 * @cameraId : camera Id
4220 *
4221 * RETURN : int32_t type of status
4222 * 0 -- success
4223 * non-zero failure code
4224 *==========================================================================*/
initStaticMetadata(int cameraId)4225 int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
4226 {
4227 int rc = 0;
4228 CameraMetadata staticInfo;
4229
4230 bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
4231 if (!facingBack)
4232 gCamCapability[cameraId]->supported_raw_dim_cnt = 0;
4233
4234 /* android.info: hardware level */
4235 uint8_t supportedHardwareLevel = (facingBack)? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL:
4236 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
4237 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
4238 &supportedHardwareLevel, 1);
4239 /*HAL 3 only*/
4240 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
4241 &gCamCapability[cameraId]->min_focus_distance, 1);
4242
4243 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
4244 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
4245
4246 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
4247 &aeLockAvailable, 1);
4248
4249 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
4250 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
4251
4252 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
4253 &awbLockAvailable, 1);
4254
4255 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
4256 &gCamCapability[cameraId]->hyper_focal_distance, 1);
4257
4258 /*should be using focal lengths but sensor doesn't provide that info now*/
4259 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
4260 &gCamCapability[cameraId]->focal_length,
4261 1);
4262
4263 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
4264 gCamCapability[cameraId]->apertures,
4265 gCamCapability[cameraId]->apertures_count);
4266
4267 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
4268 gCamCapability[cameraId]->filter_densities,
4269 gCamCapability[cameraId]->filter_densities_count);
4270
4271
4272 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
4273 (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
4274 gCamCapability[cameraId]->optical_stab_modes_count);
4275
4276 int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
4277 gCamCapability[cameraId]->lens_shading_map_size.height};
4278 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
4279 lens_shading_map_size,
4280 sizeof(lens_shading_map_size)/sizeof(int32_t));
4281
4282 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
4283 gCamCapability[cameraId]->sensor_physical_size, 2);
4284
4285 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
4286 gCamCapability[cameraId]->exposure_time_range, 2);
4287
4288 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
4289 &gCamCapability[cameraId]->max_frame_duration, 1);
4290
4291 camera_metadata_rational baseGainFactor = {
4292 gCamCapability[cameraId]->base_gain_factor.numerator,
4293 gCamCapability[cameraId]->base_gain_factor.denominator};
4294 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
4295 &baseGainFactor, 1);
4296
4297 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
4298 (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
4299
4300 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
4301 gCamCapability[cameraId]->pixel_array_size.height};
4302 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
4303 pixel_array_size, 2);
4304
4305 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
4306 gCamCapability[cameraId]->active_array_size.top,
4307 gCamCapability[cameraId]->active_array_size.width,
4308 gCamCapability[cameraId]->active_array_size.height};
4309 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
4310 active_array_size, 4);
4311
4312 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
4313 &gCamCapability[cameraId]->white_level, 1);
4314
4315 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
4316 gCamCapability[cameraId]->black_level_pattern, 4);
4317
4318 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
4319 &gCamCapability[cameraId]->flash_charge_duration, 1);
4320
4321 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
4322 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
4323
4324 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
4325 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
4326 (int32_t*)&maxFaces, 1);
4327
4328 uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
4329 if (0 && gCamCapability[cameraId]->isTimestampCalibrated) {
4330 timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
4331 }
4332 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
4333 ×tampSource, 1);
4334
4335 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
4336 &gCamCapability[cameraId]->histogram_size, 1);
4337
4338 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
4339 &gCamCapability[cameraId]->max_histogram_count, 1);
4340
4341 int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
4342 gCamCapability[cameraId]->sharpness_map_size.height};
4343
4344 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
4345 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
4346
4347 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
4348 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
4349
4350 int32_t scalar_formats[] = {
4351 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
4352 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
4353 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
4354 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
4355 HAL_PIXEL_FORMAT_RAW10,
4356 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
4357 int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
4358 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
4359 scalar_formats,
4360 scalar_formats_count);
4361
4362 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
4363 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
4364 gCamCapability[cameraId]->picture_sizes_tbl_cnt,
4365 available_processed_sizes);
4366 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
4367 available_processed_sizes,
4368 (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
4369
4370 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
4371 makeTable(gCamCapability[cameraId]->raw_dim,
4372 gCamCapability[cameraId]->supported_raw_dim_cnt,
4373 available_raw_sizes);
4374 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
4375 available_raw_sizes,
4376 gCamCapability[cameraId]->supported_raw_dim_cnt * 2);
4377
4378 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
4379 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
4380 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
4381 available_fps_ranges);
4382 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
4383 available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
4384
4385 camera_metadata_rational exposureCompensationStep = {
4386 gCamCapability[cameraId]->exp_compensation_step.numerator,
4387 gCamCapability[cameraId]->exp_compensation_step.denominator};
4388 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
4389 &exposureCompensationStep, 1);
4390
4391 uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
4392 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
4393 availableVstabModes, sizeof(availableVstabModes));
4394
4395 /*HAL 1 and HAL 3 common*/
4396 float maxZoom = 4;
4397 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
4398 &maxZoom, 1);
4399
4400 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
4401 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
4402
4403 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
4404 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
4405 max3aRegions[2] = 0; /* AF not supported */
4406 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
4407 max3aRegions, 3);
4408
4409 uint8_t availableFaceDetectModes[] = {
4410 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
4411 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
4412 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
4413 availableFaceDetectModes,
4414 sizeof(availableFaceDetectModes));
4415
4416 int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
4417 gCamCapability[cameraId]->exposure_compensation_max};
4418 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
4419 exposureCompensationRange,
4420 sizeof(exposureCompensationRange)/sizeof(int32_t));
4421
4422 uint8_t lensFacing = (facingBack) ?
4423 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
4424 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
4425
4426 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
4427 available_thumbnail_sizes,
4428 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
4429
4430 /*all sizes will be clubbed into this tag*/
4431 int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
4432 uint8_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes,
4433 (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2,
4434 MAX_SIZES_CNT * 2,
4435 gCamCapability[cameraId]->active_array_size,
4436 gCamCapability[cameraId]->max_downscale_factor);
4437 /*android.scaler.availableStreamConfigurations*/
4438 int32_t max_stream_configs_size =
4439 gCamCapability[cameraId]->picture_sizes_tbl_cnt *
4440 sizeof(scalar_formats)/sizeof(int32_t) * 4;
4441 int32_t available_stream_configs[max_stream_configs_size];
4442 int idx = 0;
4443
4444 /* Add input/output stream configurations for each scalar formats*/
4445 for (int j = 0; j < scalar_formats_count; j++) {
4446 switch (scalar_formats[j]) {
4447 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
4448 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
4449 case HAL_PIXEL_FORMAT_RAW10:
4450 for (int i = 0;
4451 i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
4452 available_stream_configs[idx] = scalar_formats[j];
4453 available_stream_configs[idx+1] =
4454 gCamCapability[cameraId]->raw_dim[i].width;
4455 available_stream_configs[idx+2] =
4456 gCamCapability[cameraId]->raw_dim[i].height;
4457 available_stream_configs[idx+3] =
4458 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
4459 idx+=4;
4460 }
4461 break;
4462 case HAL_PIXEL_FORMAT_BLOB:
4463 for (int i = 0; i < jpeg_sizes_cnt/2; i++) {
4464 available_stream_configs[idx] = scalar_formats[j];
4465 available_stream_configs[idx+1] = available_jpeg_sizes[i*2];
4466 available_stream_configs[idx+2] = available_jpeg_sizes[i*2+1];
4467 available_stream_configs[idx+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
4468 idx+=4;
4469 }
4470 break;
4471
4472 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
4473 case HAL_PIXEL_FORMAT_YCbCr_420_888:
4474 default:
4475 cam_dimension_t largest_picture_size;
4476 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
4477 for (int i = 0;
4478 i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
4479 available_stream_configs[idx] = scalar_formats[j];
4480 available_stream_configs[idx+1] =
4481 gCamCapability[cameraId]->picture_sizes_tbl[i].width;
4482 available_stream_configs[idx+2] =
4483 gCamCapability[cameraId]->picture_sizes_tbl[i].height;
4484 available_stream_configs[idx+3] =
4485 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
4486 idx+=4;
4487
4488 /* Book keep largest */
4489 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
4490 >= largest_picture_size.width &&
4491 gCamCapability[cameraId]->picture_sizes_tbl[i].height
4492 >= largest_picture_size.height)
4493 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
4494 }
4495
4496 break;
4497 }
4498 }
4499
4500 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
4501 available_stream_configs, idx);
4502 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
4503 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
4504
4505 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4506 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4507
4508 /* android.scaler.availableMinFrameDurations */
4509 int64_t available_min_durations[max_stream_configs_size];
4510 idx = 0;
4511 for (int j = 0; j < scalar_formats_count; j++) {
4512 switch (scalar_formats[j]) {
4513 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
4514 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
4515 case HAL_PIXEL_FORMAT_RAW10:
4516 for (int i = 0;
4517 i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
4518 available_min_durations[idx] = scalar_formats[j];
4519 available_min_durations[idx+1] =
4520 gCamCapability[cameraId]->raw_dim[i].width;
4521 available_min_durations[idx+2] =
4522 gCamCapability[cameraId]->raw_dim[i].height;
4523 available_min_durations[idx+3] =
4524 gCamCapability[cameraId]->raw_min_duration[i];
4525 idx+=4;
4526 }
4527 break;
4528 default:
4529 for (int i = 0;
4530 i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
4531 available_min_durations[idx] = scalar_formats[j];
4532 available_min_durations[idx+1] =
4533 gCamCapability[cameraId]->picture_sizes_tbl[i].width;
4534 available_min_durations[idx+2] =
4535 gCamCapability[cameraId]->picture_sizes_tbl[i].height;
4536 available_min_durations[idx+3] =
4537 gCamCapability[cameraId]->picture_min_duration[i];
4538 idx+=4;
4539 }
4540 break;
4541 }
4542 }
4543 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
4544 &available_min_durations[0], idx);
4545
4546 int32_t max_jpeg_size = calcMaxJpegSize(cameraId);
4547 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
4548 &max_jpeg_size, 1);
4549
4550 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
4551 size_t size = 0;
4552 for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
4553 int32_t val = lookupFwkName(EFFECT_MODES_MAP,
4554 sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
4555 gCamCapability[cameraId]->supported_effects[i]);
4556 if (val != NAME_NOT_FOUND) {
4557 avail_effects[size] = (uint8_t)val;
4558 size++;
4559 }
4560 }
4561 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
4562 avail_effects,
4563 size);
4564
4565 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
4566 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
4567 int32_t supported_scene_modes_cnt = 0;
4568 for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
4569 int32_t val = lookupFwkName(SCENE_MODES_MAP,
4570 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
4571 gCamCapability[cameraId]->supported_scene_modes[i]);
4572 if (val != NAME_NOT_FOUND) {
4573 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
4574 supported_indexes[supported_scene_modes_cnt] = i;
4575 supported_scene_modes_cnt++;
4576 }
4577 }
4578
4579 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
4580 avail_scene_modes,
4581 supported_scene_modes_cnt);
4582
4583 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
4584 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
4585 supported_scene_modes_cnt,
4586 scene_mode_overrides,
4587 supported_indexes,
4588 cameraId);
4589 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
4590 scene_mode_overrides,
4591 supported_scene_modes_cnt*3);
4592
4593 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
4594 ANDROID_CONTROL_MODE_AUTO,
4595 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
4596 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
4597 available_control_modes,
4598 3);
4599
4600 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
4601 size = 0;
4602 for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
4603 int32_t val = lookupFwkName(ANTIBANDING_MODES_MAP,
4604 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
4605 gCamCapability[cameraId]->supported_antibandings[i]);
4606 if (val != NAME_NOT_FOUND) {
4607 avail_antibanding_modes[size] = (uint8_t)val;
4608 size++;
4609 }
4610
4611 }
4612 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
4613 avail_antibanding_modes,
4614 size);
4615
4616 uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
4617 size = 0;
4618 if (0 == gCamCapability[cameraId]->aberration_modes_count) {
4619 avail_abberation_modes[0] =
4620 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
4621 size++;
4622 } else {
4623 for (size_t i = 0; i < gCamCapability[cameraId]->aberration_modes_count; i++) {
4624 int32_t val = lookupFwkName(COLOR_ABERRATION_MAP,
4625 sizeof(COLOR_ABERRATION_MAP)/sizeof(COLOR_ABERRATION_MAP[0]),
4626 gCamCapability[cameraId]->aberration_modes[i]);
4627 if (val != NAME_NOT_FOUND) {
4628 avail_abberation_modes[size] = (uint8_t)val;
4629 size++;
4630 } else {
4631 ALOGE("%s: Invalid CAC mode %d", __func__,
4632 gCamCapability[cameraId]->aberration_modes[i]);
4633 break;
4634 }
4635 }
4636
4637 }
4638 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
4639 avail_abberation_modes,
4640 size);
4641
4642 char cafProp[PROPERTY_VALUE_MAX];
4643 memset(cafProp, 0, sizeof(cafProp));
4644 property_get("persist.camera.caf.disable", cafProp, "0");
4645 uint8_t cafDisabled = atoi(cafProp);
4646
4647 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
4648 size = 0;
4649 for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
4650 if (cafDisabled &&
4651 ((gCamCapability[cameraId]->supported_focus_modes[i]
4652 == CAM_FOCUS_MODE_CONTINOUS_PICTURE) ||
4653 (gCamCapability[cameraId]->supported_focus_modes[i]
4654 == CAM_FOCUS_MODE_CONTINOUS_VIDEO)))
4655 continue;
4656
4657 int32_t val = lookupFwkName(FOCUS_MODES_MAP,
4658 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
4659 gCamCapability[cameraId]->supported_focus_modes[i]);
4660 if (val != NAME_NOT_FOUND) {
4661 avail_af_modes[size] = (uint8_t)val;
4662 size++;
4663 }
4664 }
4665 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
4666 avail_af_modes,
4667 size);
4668
4669 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
4670 size = 0;
4671 for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
4672 int32_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
4673 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
4674 gCamCapability[cameraId]->supported_white_balances[i]);
4675 if (val != NAME_NOT_FOUND) {
4676 avail_awb_modes[size] = (uint8_t)val;
4677 size++;
4678 }
4679 }
4680 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
4681 avail_awb_modes,
4682 size);
4683
4684 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
4685 for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
4686 available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
4687
4688 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
4689 available_flash_levels,
4690 gCamCapability[cameraId]->supported_flash_firing_level_cnt);
4691
4692 uint8_t flashAvailable;
4693 if (gCamCapability[cameraId]->flash_available)
4694 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
4695 else
4696 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
4697 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
4698 &flashAvailable, 1);
4699
4700 uint8_t avail_ae_modes[5];
4701 size = 0;
4702 for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
4703 avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
4704 size++;
4705 }
4706 if (flashAvailable) {
4707 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
4708 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
4709 }
4710 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
4711 avail_ae_modes,
4712 size);
4713
4714 int32_t sensitivity_range[2];
4715 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
4716 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
4717 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
4718 sensitivity_range,
4719 sizeof(sensitivity_range) / sizeof(int32_t));
4720
4721 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
4722 &gCamCapability[cameraId]->max_analog_sensitivity,
4723 1);
4724
4725 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
4726 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
4727 &sensor_orientation,
4728 1);
4729
4730 int32_t max_output_streams[3] = {
4731 MAX_STALLING_STREAMS,
4732 MAX_PROCESSED_STREAMS,
4733 MAX_RAW_STREAMS};
4734 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
4735 max_output_streams,
4736 3);
4737
4738 uint8_t avail_leds = 0;
4739 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
4740 &avail_leds, 0);
4741
4742 uint8_t focus_dist_calibrated;
4743 int32_t val = lookupFwkName(FOCUS_CALIBRATION_MAP,
4744 sizeof(FOCUS_CALIBRATION_MAP)/sizeof(FOCUS_CALIBRATION_MAP[0]),
4745 gCamCapability[cameraId]->focus_dist_calibrated);
4746 if (val != NAME_NOT_FOUND) {
4747 focus_dist_calibrated = (uint8_t)val;
4748 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
4749 &focus_dist_calibrated, 1);
4750 }
4751
4752 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
4753 size = 0;
4754 for (int i = 0; i < gCamCapability[cameraId]->supported_test_pattern_modes_cnt;
4755 i++) {
4756 int32_t val = lookupFwkName(TEST_PATTERN_MAP,
4757 sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
4758 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
4759 if (val != NAME_NOT_FOUND) {
4760 avail_testpattern_modes[size] = val;
4761 size++;
4762 }
4763 }
4764 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
4765 avail_testpattern_modes,
4766 size);
4767
4768 uint8_t max_pipeline_depth = MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY;
4769 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
4770 &max_pipeline_depth,
4771 1);
4772
4773 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
4774 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
4775 &partial_result_count,
4776 1);
4777
4778 int32_t max_stall_duration = MAX_REPROCESS_STALL;
4779 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
4780
4781 uint8_t available_capabilities[MAX_AVAILABLE_CAPABILITIES];
4782 uint8_t available_capabilities_count = 0;
4783 available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE;
4784 available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR;
4785 available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING;
4786 available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS;
4787 available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE;
4788 if (facingBack) {
4789 available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW;
4790 }
4791 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
4792 available_capabilities,
4793 available_capabilities_count);
4794
4795 int32_t max_input_streams = 0;
4796 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
4797 &max_input_streams,
4798 1);
4799
4800 int32_t io_format_map[] = {};
4801 ;
4802 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
4803 io_format_map, 0);
4804
4805 int32_t max_latency = (facingBack)? ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL:CAM_MAX_SYNC_LATENCY;
4806 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
4807 &max_latency,
4808 1);
4809
4810 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
4811 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
4812 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
4813 available_hot_pixel_modes,
4814 2);
4815
4816 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
4817 ANDROID_SHADING_MODE_FAST,
4818 ANDROID_SHADING_MODE_HIGH_QUALITY};
4819 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
4820 available_shading_modes,
4821 3);
4822
4823 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
4824 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
4825 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
4826 available_lens_shading_map_modes,
4827 2);
4828
4829 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
4830 ANDROID_EDGE_MODE_FAST,
4831 ANDROID_EDGE_MODE_HIGH_QUALITY};
4832 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
4833 available_edge_modes,
4834 3);
4835
4836 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
4837 ANDROID_NOISE_REDUCTION_MODE_FAST,
4838 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
4839 ANDROID_NOISE_REDUCTION_MODE_MINIMAL};
4840 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
4841 available_noise_red_modes,
4842 4);
4843
4844 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
4845 ANDROID_TONEMAP_MODE_FAST,
4846 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
4847 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
4848 available_tonemap_modes,
4849 3);
4850
4851 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
4852 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
4853 available_hot_pixel_map_modes,
4854 1);
4855
4856 uint8_t fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
4857 sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
4858 gCamCapability[cameraId]->reference_illuminant1);
4859 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1,
4860 &fwkReferenceIlluminant, 1);
4861
4862 fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
4863 sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
4864 gCamCapability[cameraId]->reference_illuminant2);
4865 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
4866 &fwkReferenceIlluminant, 1);
4867
4868 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1,
4869 (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix1,
4870 3*3);
4871
4872 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2,
4873 (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix2,
4874 3*3);
4875
4876 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1,
4877 (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform1,
4878 3*3);
4879
4880 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2,
4881 (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform2,
4882 3*3);
4883
4884 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
4885 (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform1,
4886 3*3);
4887
4888 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2,
4889 (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform2,
4890 3*3);
4891
4892 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
4893 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
4894 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
4895 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
4896 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
4897 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
4898 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
4899 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
4900 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
4901 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
4902 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
4903 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE, ANDROID_EDGE_STRENGTH,
4904 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
4905 ANDROID_JPEG_GPS_COORDINATES,
4906 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
4907 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
4908 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
4909 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
4910 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
4911 ANDROID_NOISE_REDUCTION_STRENGTH, ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
4912 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
4913 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
4914 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
4915 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
4916 ANDROID_SHADING_STRENGTH, ANDROID_STATISTICS_FACE_DETECT_MODE,
4917 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
4918 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
4919 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
4920 ANDROID_BLACK_LEVEL_LOCK };
4921
4922 size_t request_keys_cnt =
4923 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
4924 //NOTE: Please increase available_request_keys array size before
4925 //adding any new entries.
4926 int32_t available_request_keys[request_keys_cnt+1];
4927 memcpy(available_request_keys, request_keys_basic,
4928 sizeof(request_keys_basic));
4929 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
4930 available_request_keys[request_keys_cnt++] =
4931 ANDROID_CONTROL_AF_REGIONS;
4932 }
4933 //NOTE: Please increase available_request_keys array size before
4934 //adding any new entries.
4935 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
4936 available_request_keys, request_keys_cnt);
4937
4938 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
4939 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
4940 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
4941 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
4942 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
4943 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
4944 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
4945 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
4946 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
4947 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
4948 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
4949 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
4950 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
4951 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
4952 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
4953 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
4954 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
4955 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
4956 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
4957 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4958 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_IDS,
4959 ANDROID_STATISTICS_FACE_LANDMARKS, ANDROID_STATISTICS_FACE_RECTANGLES,
4960 ANDROID_STATISTICS_FACE_SCORES};
4961 size_t result_keys_cnt =
4962 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
4963 //NOTE: Please increase available_result_keys array size before
4964 //adding any new entries.
4965 int32_t available_result_keys[result_keys_cnt+3];
4966 memcpy(available_result_keys, result_keys_basic,
4967 sizeof(result_keys_basic));
4968 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
4969 available_result_keys[result_keys_cnt++] =
4970 ANDROID_CONTROL_AF_REGIONS;
4971 }
4972 if (facingBack) {
4973 available_result_keys[result_keys_cnt++] = ANDROID_SENSOR_NOISE_PROFILE;
4974 available_result_keys[result_keys_cnt++] = ANDROID_SENSOR_GREEN_SPLIT;
4975 }
4976 //NOTE: Please increase available_result_keys array size before
4977 //adding any new entries.
4978
4979 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
4980 available_result_keys, result_keys_cnt);
4981
4982 int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
4983 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
4984 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
4985 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
4986 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
4987 ANDROID_SCALER_CROPPING_TYPE,
4988 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
4989 ANDROID_SYNC_MAX_LATENCY,
4990 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
4991 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
4992 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
4993 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
4994 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
4995 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
4996 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
4997 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
4998 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
4999 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
5000 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
5001 ANDROID_LENS_FACING,
5002 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
5003 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
5004 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
5005 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
5006 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
5007 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
5008 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
5009 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
5010 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
5011 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
5012 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
5013 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
5014 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
5015 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
5016 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
5017 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
5018 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
5019 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
5020 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
5021 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
5022 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
5023 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
5024 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
5025 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
5026 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
5027 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
5028 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
5029 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
5030 ANDROID_TONEMAP_MAX_CURVE_POINTS,
5031 ANDROID_CONTROL_AVAILABLE_MODES,
5032 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
5033 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
5034 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
5035 ANDROID_SHADING_AVAILABLE_MODES,
5036 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
5037 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
5038 available_characteristics_keys,
5039 sizeof(available_characteristics_keys)/sizeof(int32_t));
5040
5041 /*available stall durations depend on the hw + sw and will be different for different devices */
5042 /*have to add for raw after implementation*/
5043 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
5044 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
5045
5046 size_t available_stall_size = gCamCapability[cameraId]->picture_sizes_tbl_cnt * 4;
5047 int64_t available_stall_durations[available_stall_size];
5048 idx = 0;
5049 for (uint32_t j = 0; j < stall_formats_count; j++) {
5050 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
5051 for (uint32_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
5052 available_stall_durations[idx] = stall_formats[j];
5053 available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
5054 available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
5055 available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
5056 idx+=4;
5057 }
5058 } else {
5059 for (uint32_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
5060 available_stall_durations[idx] = stall_formats[j];
5061 available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
5062 available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
5063 available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
5064 idx+=4;
5065 }
5066 }
5067 }
5068 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
5069 available_stall_durations,
5070 idx);
5071 //QCAMERA3_OPAQUE_RAW
5072 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
5073 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
5074 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
5075 case LEGACY_RAW:
5076 if (gCamCapability[cameraId]->white_level == (1<<8)-1)
5077 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
5078 else if (gCamCapability[cameraId]->white_level == (1<<10)-1)
5079 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
5080 else if (gCamCapability[cameraId]->white_level == (1<<12)-1)
5081 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
5082 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
5083 break;
5084 case MIPI_RAW:
5085 if (gCamCapability[cameraId]->white_level == (1<<8)-1)
5086 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
5087 else if (gCamCapability[cameraId]->white_level == (1<<10)-1)
5088 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
5089 else if (gCamCapability[cameraId]->white_level == (1<<12)-1)
5090 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
5091 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
5092 break;
5093 default:
5094 ALOGE("%s: unknown opaque_raw_format %d", __func__,
5095 gCamCapability[cameraId]->opaque_raw_fmt);
5096 break;
5097 }
5098 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
5099
5100 if (gCamCapability[cameraId]->supported_raw_dim_cnt) {
5101 int32_t strides[3*gCamCapability[cameraId]->supported_raw_dim_cnt];
5102 for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
5103 cam_stream_buf_plane_info_t buf_planes;
5104 strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
5105 strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
5106 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
5107 &gCamCapability[cameraId]->padding_info, &buf_planes);
5108 strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
5109 }
5110 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
5111 3*gCamCapability[cameraId]->supported_raw_dim_cnt);
5112 }
5113 gStaticMetadata[cameraId] = staticInfo.release();
5114 return rc;
5115 }
5116
5117 /*===========================================================================
5118 * FUNCTION : makeTable
5119 *
5120 * DESCRIPTION: make a table of sizes
5121 *
5122 * PARAMETERS :
5123 *
5124 *
5125 *==========================================================================*/
makeTable(cam_dimension_t * dimTable,uint8_t size,int32_t * sizeTable)5126 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
5127 int32_t* sizeTable)
5128 {
5129 int j = 0;
5130 for (int i = 0; i < size; i++) {
5131 sizeTable[j] = dimTable[i].width;
5132 sizeTable[j+1] = dimTable[i].height;
5133 j+=2;
5134 }
5135 }
5136
5137 /*===========================================================================
5138 * FUNCTION : makeFPSTable
5139 *
5140 * DESCRIPTION: make a table of fps ranges
5141 *
5142 * PARAMETERS :
5143 *
5144 *==========================================================================*/
makeFPSTable(cam_fps_range_t * fpsTable,uint8_t size,int32_t * fpsRangesTable)5145 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
5146 int32_t* fpsRangesTable)
5147 {
5148 int j = 0;
5149 for (int i = 0; i < size; i++) {
5150 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
5151 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
5152 j+=2;
5153 }
5154 }
5155
5156 /*===========================================================================
5157 * FUNCTION : makeOverridesList
5158 *
5159 * DESCRIPTION: make a list of scene mode overrides
5160 *
5161 * PARAMETERS :
5162 *
5163 *
5164 *==========================================================================*/
makeOverridesList(cam_scene_mode_overrides_t * overridesTable,uint8_t size,uint8_t * overridesList,uint8_t * supported_indexes,int camera_id)5165 void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
5166 uint8_t size, uint8_t* overridesList,
5167 uint8_t* supported_indexes,
5168 int camera_id)
5169 {
5170 /*daemon will give a list of overrides for all scene modes.
5171 However we should send the fwk only the overrides for the scene modes
5172 supported by the framework*/
5173 int j = 0, index = 0, supt = 0;
5174 uint8_t focus_override;
5175 for (int i = 0; i < size; i++) {
5176 supt = 0;
5177 index = supported_indexes[i];
5178 overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
5179 overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
5180 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
5181 overridesTable[index].awb_mode);
5182 focus_override = (uint8_t)overridesTable[index].af_mode;
5183 for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
5184 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
5185 supt = 1;
5186 break;
5187 }
5188 }
5189 if (supt) {
5190 overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
5191 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
5192 focus_override);
5193 } else {
5194 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
5195 }
5196 j+=3;
5197 }
5198 }
5199
5200 /*===========================================================================
5201 * FUNCTION : filterJpegSizes
5202 *
5203 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
5204 * could be downscaled to
5205 *
5206 * PARAMETERS :
5207 *
5208 * RETURN : length of jpegSizes array
5209 *==========================================================================*/
5210
filterJpegSizes(int32_t * jpegSizes,int32_t * processedSizes,uint8_t processedSizesCnt,uint8_t maxCount,cam_rect_t active_array_size,uint8_t downscale_factor)5211 uint8_t QCamera3HardwareInterface::filterJpegSizes(int32_t* jpegSizes, int32_t* processedSizes,
5212 uint8_t processedSizesCnt,
5213 uint8_t maxCount,
5214 cam_rect_t active_array_size,
5215 uint8_t downscale_factor)
5216 {
5217 if (downscale_factor == 0) {
5218 downscale_factor = 1;
5219 }
5220 int32_t min_width = active_array_size.width / downscale_factor;
5221 int32_t min_height = active_array_size.height / downscale_factor;
5222 uint8_t jpegSizesCnt = 0;
5223 if (processedSizesCnt > maxCount) {
5224 processedSizesCnt = maxCount;
5225 }
5226 for (int i = 0; i < processedSizesCnt; i+=2) {
5227 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
5228 jpegSizes[jpegSizesCnt] = processedSizes[i];
5229 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
5230 jpegSizesCnt += 2;
5231 }
5232 }
5233 return jpegSizesCnt;
5234 }
5235
5236 /*===========================================================================
5237 * FUNCTION : getPreviewHalPixelFormat
5238 *
5239 * DESCRIPTION: convert the format to type recognized by framework
5240 *
5241 * PARAMETERS : format : the format from backend
5242 *
5243 ** RETURN : format recognized by framework
5244 *
5245 *==========================================================================*/
getScalarFormat(int32_t format)5246 int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
5247 {
5248 int32_t halPixelFormat;
5249
5250 switch (format) {
5251 case CAM_FORMAT_YUV_420_NV12:
5252 halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
5253 break;
5254 case CAM_FORMAT_YUV_420_NV21:
5255 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
5256 break;
5257 case CAM_FORMAT_YUV_420_NV21_ADRENO:
5258 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
5259 break;
5260 case CAM_FORMAT_YUV_420_YV12:
5261 halPixelFormat = HAL_PIXEL_FORMAT_YV12;
5262 break;
5263 case CAM_FORMAT_YUV_422_NV16:
5264 case CAM_FORMAT_YUV_422_NV61:
5265 default:
5266 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
5267 break;
5268 }
5269 return halPixelFormat;
5270 }
5271 /*===========================================================================
5272 * FUNCTION : computeNoiseModelEntryS
5273 *
5274 * DESCRIPTION: function to map a given sensitivity to the S noise
5275 * model parameters in the DNG noise model.
5276 *
5277 * PARAMETERS : sens : the sensor sensitivity
5278 *
5279 ** RETURN : S (sensor amplification) noise
5280 *
5281 *==========================================================================*/
5282
computeNoiseModelEntryS(int32_t sens)5283 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
5284 double s = 4.290559e-06 * sens + 4.370087e-05;
5285 return s < 0.0 ? 0.0 : s;
5286 }
5287
5288 /*===========================================================================
5289 * FUNCTION : computeNoiseModelEntryO
5290 *
5291 * DESCRIPTION: function to map a given sensitivity to the O noise
5292 * model parameters in the DNG noise model.
5293 *
5294 * PARAMETERS : sens : the sensor sensitivity
5295 *
5296 ** RETURN : O (sensor readout) noise
5297 *
5298 *==========================================================================*/
5299
computeNoiseModelEntryO(int32_t sens)5300 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
5301 double digital_gain = sens / 320.0;
5302 digital_gain = digital_gain < 1.0 ? 1.0 : digital_gain;
5303 double o = 6.011498e-11 * sens * sens + 2.173219e-06 * digital_gain * digital_gain;
5304 return o < 0.0 ? 0.0 : o;
5305 }
5306
5307 /*===========================================================================
5308 * FUNCTION : getSensorSensitivity
5309 *
5310 * DESCRIPTION: convert iso_mode to an integer value
5311 *
5312 * PARAMETERS : iso_mode : the iso_mode supported by sensor
5313 *
5314 ** RETURN : sensitivity supported by sensor
5315 *
5316 *==========================================================================*/
getSensorSensitivity(int32_t iso_mode)5317 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
5318 {
5319 int32_t sensitivity;
5320
5321 switch (iso_mode) {
5322 case CAM_ISO_MODE_100:
5323 sensitivity = 100;
5324 break;
5325 case CAM_ISO_MODE_200:
5326 sensitivity = 200;
5327 break;
5328 case CAM_ISO_MODE_400:
5329 sensitivity = 400;
5330 break;
5331 case CAM_ISO_MODE_800:
5332 sensitivity = 800;
5333 break;
5334 case CAM_ISO_MODE_1600:
5335 sensitivity = 1600;
5336 break;
5337 default:
5338 sensitivity = -1;
5339 break;
5340 }
5341 return sensitivity;
5342 }
5343
5344 /*===========================================================================
5345 * FUNCTION : AddSetParmEntryToBatch
5346 *
5347 * DESCRIPTION: add set parameter entry into batch
5348 *
5349 * PARAMETERS :
5350 * @p_table : ptr to parameter buffer
5351 * @paramType : parameter type
5352 * @paramLength : length of parameter value
5353 * @paramValue : ptr to parameter value
5354 *
5355 * RETURN : int32_t type of status
5356 * NO_ERROR -- success
5357 * none-zero failure code
5358 *==========================================================================*/
// Copies one parameter value into the batch table and marks it valid.
// @p_table     : destination parameter buffer (daemon batch)
// @paramType   : which parameter slot to fill (must be < CAM_INTF_PARM_MAX)
// @paramLength : byte length of paramValue (must fit the slot)
// @paramValue  : source value to copy
// Returns NO_ERROR on success, BAD_VALUE on invalid arguments or oversize input.
int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
                                          cam_intf_parm_type_t paramType,
                                          uint32_t paramLength,
                                          void *paramValue)
{
    void* dst;
    // Reject NULL buffers and out-of-range parameter types up front.
    if ((NULL == p_table) || (NULL == paramValue) ||
            (paramType >= CAM_INTF_PARM_MAX)) {
        ALOGE("%s: Invalid p_table: %p, paramValue: %p, param type: %d",
                __func__, p_table, paramValue, paramType);
        return BAD_VALUE;
    }
    /*************************************************************************
    *                   Copy contents into entry                             *
    *************************************************************************/
    // The copy must not overrun the fixed-size slot for this type.
    if (paramLength > get_size_of(paramType)) {
        ALOGE("%s: input larger than max entry size, type=%d, length =%d",
                __func__, paramType, paramLength);
        return BAD_VALUE;
    }
    dst = get_pointer_of(paramType, p_table);
    if(NULL != dst){
        memcpy(dst, paramValue, paramLength);
        p_table->is_valid[paramType] = 1;
    }
    // NOTE(review): if get_pointer_of() returns NULL the entry is silently
    // skipped (is_valid stays unset) yet NO_ERROR is returned — callers
    // cannot distinguish this from a successful copy. Confirm whether this
    // best-effort behavior is intentional before changing it.
    return NO_ERROR;
}
5386
5387 /*===========================================================================
5388 * FUNCTION : lookupFwkName
5389 *
5390 * DESCRIPTION: In case the enum is not same in fwk and backend
5391 * make sure the parameter is correctly propogated
5392 *
5393 * PARAMETERS :
5394 * @arr : map between the two enums
5395 * @len : len of the map
5396 * @hal_name : name of the hal_parm to map
5397 *
5398 * RETURN : int type of status
5399 * fwk_name -- success
5400 * none-zero failure code
5401 *==========================================================================*/
lookupFwkName(const QCameraMap arr[],int len,int hal_name)5402 int32_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
5403 int len, int hal_name)
5404 {
5405
5406 for (int i = 0; i < len; i++) {
5407 if (arr[i].hal_name == hal_name)
5408 return arr[i].fwk_name;
5409 }
5410
5411 /* Not able to find matching framework type is not necessarily
5412 * an error case. This happens when mm-camera supports more attributes
5413 * than the frameworks do */
5414 CDBG_HIGH("%s: Cannot find matching framework type", __func__);
5415 return NAME_NOT_FOUND;
5416 }
5417
5418 /*===========================================================================
5419 * FUNCTION : lookupHalName
5420 *
5421 * DESCRIPTION: In case the enum is not same in fwk and backend
5422 * make sure the parameter is correctly propogated
5423 *
5424 * PARAMETERS :
5425 * @arr : map between the two enums
5426 * @len : len of the map
5427 * @fwk_name : name of the hal_parm to map
5428 *
5429 * RETURN : int32_t type of status
5430 * hal_name -- success
5431 * none-zero failure code
5432 *==========================================================================*/
// Reverse of lookupFwkName: maps a framework enum value to the HAL value.
// @arr      : map between the two enums
// @len      : number of entries in the map
// @fwk_name : framework value to look up
// Returns the HAL value on success, NAME_NOT_FOUND otherwise.
//
// NOTE(review): the return type is int8_t, so any hal_name value outside
// [-128, 127] is truncated here, and NAME_NOT_FOUND itself is squeezed into
// 8 bits — a legitimate hal_name could collide with the error value. The
// declaration lives in the header, so widening the return type needs a
// coordinated change; flagging rather than fixing in place.
int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
                                                int len, unsigned int fwk_name)
{
    for (int i = 0; i < len; i++) {
       if (arr[i].fwk_name == fwk_name)
            return arr[i].hal_name;
    }
    // Unlike lookupFwkName, a failed reverse lookup is logged as an error:
    // the framework should only send values the HAL advertised.
    ALOGE("%s: Cannot find matching hal type", __func__);
    return NAME_NOT_FOUND;
}
5443
5444 /*===========================================================================
5445 * FUNCTION : lookupProp
5446 *
5447 * DESCRIPTION: lookup a value by its name
5448 *
5449 * PARAMETERS :
5450 * @attr : map contains <name, value>
5451 * @len : size of the map
5452 * @name : name to be looked up
5453 *
5454 * RETURN : Value if found
5455 * CAM_CDS_MODE_MAX if not found
5456 *==========================================================================*/
lookupProp(const QCameraPropMap arr[],int len,const char * name)5457 cam_cds_mode_type_t QCamera3HardwareInterface::lookupProp(const QCameraPropMap arr[],
5458 int len, const char *name)
5459 {
5460 if (name) {
5461 for (int i = 0; i < len; i++) {
5462 if (!strcmp(arr[i].desc, name)) {
5463 return arr[i].val;
5464 }
5465 }
5466 }
5467 return CAM_CDS_MODE_MAX;
5468 }
5469
5470 /*===========================================================================
5471 * FUNCTION : getCapabilities
5472 *
5473 * DESCRIPTION: query camera capabilities
5474 *
5475 * PARAMETERS :
5476 * @cameraId : camera Id
5477 * @info : camera info struct to be filled in with camera capabilities
5478 *
5479 * RETURN : int32_t type of status
5480 * NO_ERROR -- success
5481 * none-zero failure code
5482 *==========================================================================*/
getCamInfo(int cameraId,struct camera_info * info)5483 int QCamera3HardwareInterface::getCamInfo(int cameraId,
5484 struct camera_info *info)
5485 {
5486 ATRACE_CALL();
5487 int rc = 0;
5488
5489 if (NULL == gCamCapability[cameraId]) {
5490 rc = initCapabilities(cameraId);
5491 if (rc < 0) {
5492 //pthread_mutex_unlock(&g_camlock);
5493 return rc;
5494 }
5495 }
5496
5497 if (NULL == gStaticMetadata[cameraId]) {
5498 rc = initStaticMetadata(cameraId);
5499 if (rc < 0) {
5500 return rc;
5501 }
5502 }
5503
5504 switch(gCamCapability[cameraId]->position) {
5505 case CAM_POSITION_BACK:
5506 info->facing = CAMERA_FACING_BACK;
5507 break;
5508
5509 case CAM_POSITION_FRONT:
5510 info->facing = CAMERA_FACING_FRONT;
5511 break;
5512
5513 default:
5514 ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
5515 rc = -1;
5516 break;
5517 }
5518
5519 info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
5520 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
5521 info->static_camera_characteristics = gStaticMetadata[cameraId];
5522
5523 //For now assume both cameras can operate independently.
5524 info->conflicting_devices = NULL;
5525 info->conflicting_devices_length = 0;
5526
5527 //resource cost is 100 * MIN(1.0, m/M),
5528 //where m is throughput requirement with maximum stream configuration
5529 //and M is CPP maximum throughput.
5530 float max_fps = 0.0;
5531 for (uint32_t i = 0;
5532 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
5533 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
5534 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
5535 }
5536 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
5537 gCamCapability[cameraId]->active_array_size.width *
5538 gCamCapability[cameraId]->active_array_size.height * max_fps /
5539 gCamCapability[cameraId]->max_pixel_bandwidth;
5540 info->resource_cost = 100 * MIN(1.0, ratio);
5541 ALOGI("%s: camera %d resource cost is %d", __func__, cameraId,
5542 info->resource_cost);
5543
5544 return rc;
5545 }
5546
5547 /*===========================================================================
5548 * FUNCTION : translateCapabilityToMetadata
5549 *
5550 * DESCRIPTION: translate the capability into camera_metadata_t
5551 *
5552 * PARAMETERS : type of the request
5553 *
5554 *
5555 * RETURN : success: camera_metadata_t*
5556 * failure: NULL
5557 *
5558 *==========================================================================*/
translateCapabilityToMetadata(int type)5559 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
5560 {
5561 pthread_mutex_lock(&mMutex);
5562
5563 if (mDefaultMetadata[type] != NULL) {
5564 pthread_mutex_unlock(&mMutex);
5565 return mDefaultMetadata[type];
5566 }
5567 //first time we are handling this request
5568 //fill up the metadata structure using the wrapper class
5569 CameraMetadata settings;
5570 //translate from cam_capability_t to camera_metadata_tag_t
5571 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
5572 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
5573 int32_t defaultRequestID = 0;
5574 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
5575
5576 /* OIS disable */
5577 char ois_prop[PROPERTY_VALUE_MAX];
5578 memset(ois_prop, 0, sizeof(ois_prop));
5579 property_get("persist.camera.ois.disable", ois_prop, "0");
5580 uint8_t ois_disable = atoi(ois_prop);
5581
5582 /* OIS/EIS disable */
5583 char eis_prop[PROPERTY_VALUE_MAX];
5584 memset(eis_prop, 0, sizeof(eis_prop));
5585 property_get("camera.eis.enable", eis_prop, "0");
5586 mEisEnable = atoi(eis_prop);
5587
5588 /* Force video to use OIS */
5589 char videoOisProp[PROPERTY_VALUE_MAX];
5590 memset(videoOisProp, 0, sizeof(videoOisProp));
5591 property_get("persist.camera.ois.video", videoOisProp, "1");
5592 uint8_t forceVideoOis = atoi(videoOisProp);
5593
5594 uint8_t controlIntent = 0;
5595 uint8_t focusMode;
5596 uint8_t vsMode;
5597 uint8_t optStabMode;
5598 uint8_t cacMode;
5599 uint8_t edge_mode;
5600 uint8_t noise_red_mode;
5601 uint8_t tonemap_mode;
5602 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
5603 switch (type) {
5604 case CAMERA3_TEMPLATE_PREVIEW:
5605 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
5606 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
5607 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
5608 edge_mode = ANDROID_EDGE_MODE_FAST;
5609 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
5610 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
5611 break;
5612 case CAMERA3_TEMPLATE_STILL_CAPTURE:
5613 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
5614 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
5615 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
5616 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
5617 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
5618 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
5619 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
5620 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
5621 break;
5622 case CAMERA3_TEMPLATE_VIDEO_RECORD:
5623 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
5624 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
5625 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
5626 edge_mode = ANDROID_EDGE_MODE_FAST;
5627 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
5628 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
5629 if (forceVideoOis)
5630 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
5631 break;
5632 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
5633 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
5634 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
5635 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
5636 edge_mode = ANDROID_EDGE_MODE_FAST;
5637 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
5638 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
5639 if (forceVideoOis)
5640 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
5641 break;
5642 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
5643 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
5644 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
5645 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
5646 edge_mode = ANDROID_EDGE_MODE_FAST;
5647 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
5648 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
5649 break;
5650 case CAMERA3_TEMPLATE_MANUAL:
5651 edge_mode = ANDROID_EDGE_MODE_FAST;
5652 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
5653 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
5654 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
5655 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
5656 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
5657 break;
5658 default:
5659 edge_mode = ANDROID_EDGE_MODE_FAST;
5660 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
5661 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
5662 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
5663 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
5664 break;
5665 }
5666 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
5667 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
5668 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
5669 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
5670 }
5671 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
5672
5673 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
5674 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
5675 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
5676 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
5677 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
5678 || ois_disable)
5679 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
5680 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
5681
5682 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
5683 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
5684
5685 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
5686 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
5687
5688 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
5689 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
5690
5691 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
5692 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
5693
5694 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
5695 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
5696
5697 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
5698 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
5699
5700 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
5701 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
5702
5703 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
5704 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
5705
5706 /*flash*/
5707 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
5708 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
5709
5710 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
5711 settings.update(ANDROID_FLASH_FIRING_POWER,
5712 &flashFiringLevel, 1);
5713
5714 /* lens */
5715 float default_aperture = gCamCapability[mCameraId]->apertures[0];
5716 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
5717
5718 if (gCamCapability[mCameraId]->filter_densities_count) {
5719 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
5720 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
5721 gCamCapability[mCameraId]->filter_densities_count);
5722 }
5723
5724 float default_focal_length = gCamCapability[mCameraId]->focal_length;
5725 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
5726
5727 float default_focus_distance = 0;
5728 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
5729
5730 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
5731 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
5732
5733 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
5734 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
5735
5736 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
5737 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
5738
5739 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_FULL;
5740 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
5741
5742 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
5743 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
5744
5745 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
5746 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
5747
5748 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5749 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5750
5751 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
5752 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
5753
5754 /* Exposure time(Update the Min Exposure Time)*/
5755 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
5756 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
5757
5758 /* frame duration */
5759 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
5760 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
5761
5762 /* sensitivity */
5763 static const int32_t default_sensitivity = 100;
5764 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
5765
5766 /*edge mode*/
5767 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
5768
5769 /*noise reduction mode*/
5770 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
5771
5772 /*color correction mode*/
5773 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
5774 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
5775
5776 /*transform matrix mode*/
5777 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
5778
5779 uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
5780 settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
5781
5782 int32_t scaler_crop_region[4];
5783 scaler_crop_region[0] = 0;
5784 scaler_crop_region[1] = 0;
5785 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
5786 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
5787 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
5788
5789 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
5790 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
5791
5792 /*focus distance*/
5793 float focus_distance = 0.0;
5794 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
5795
5796 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
5797 float max_range = 0.0;
5798 float max_fixed_fps = 0.0;
5799 int32_t fps_range[2] = {0, 0};
5800 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
5801 i++) {
5802 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
5803 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
5804 if (type == CAMERA3_TEMPLATE_PREVIEW ||
5805 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
5806 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
5807 if (range > max_range) {
5808 fps_range[0] =
5809 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
5810 fps_range[1] =
5811 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
5812 max_range = range;
5813 }
5814 } else {
5815 if (range < 0.01 && max_fixed_fps <
5816 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
5817 fps_range[0] =
5818 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
5819 fps_range[1] =
5820 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
5821 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
5822 }
5823 }
5824 }
5825 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
5826
5827 /*precapture trigger*/
5828 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
5829 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
5830
5831 /*af trigger*/
5832 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
5833 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
5834
5835 /* ae & af regions */
5836 int32_t active_region[] = {
5837 gCamCapability[mCameraId]->active_array_size.left,
5838 gCamCapability[mCameraId]->active_array_size.top,
5839 gCamCapability[mCameraId]->active_array_size.left +
5840 gCamCapability[mCameraId]->active_array_size.width,
5841 gCamCapability[mCameraId]->active_array_size.top +
5842 gCamCapability[mCameraId]->active_array_size.height,
5843 0};
5844 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region, 5);
5845 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region, 5);
5846
5847 /* black level lock */
5848 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
5849 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
5850
5851 /* face detect mode */
5852 uint8_t facedetect_mode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
5853 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &facedetect_mode, 1);
5854
5855 /* lens shading map mode */
5856 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
5857 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type &&
5858 gCamCapability[mCameraId]->supported_raw_dim_cnt > 0) {
5859 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
5860 }
5861 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
5862
5863 //special defaults for manual template
5864 if (type == CAMERA3_TEMPLATE_MANUAL) {
5865 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
5866 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
5867
5868 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
5869 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
5870
5871 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
5872 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
5873
5874 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
5875 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
5876
5877 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
5878 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
5879
5880 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
5881 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
5882 }
5883
5884 /* CDS default */
5885 char prop[PROPERTY_VALUE_MAX];
5886 memset(prop, 0, sizeof(prop));
5887 property_get("persist.camera.CDS", prop, "Auto");
5888 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
5889 cds_mode = lookupProp(CDS_MAP, sizeof(CDS_MAP)/sizeof(QCameraPropMap), prop);
5890 if (CAM_CDS_MODE_MAX == cds_mode) {
5891 cds_mode = CAM_CDS_MODE_AUTO;
5892 }
5893 int32_t mode = cds_mode;
5894 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
5895
5896 mDefaultMetadata[type] = settings.release();
5897
5898 pthread_mutex_unlock(&mMutex);
5899 return mDefaultMetadata[type];
5900 }
5901
5902 /*===========================================================================
5903 * FUNCTION : setFrameParameters
5904 *
5905 * DESCRIPTION: set parameters per frame as requested in the metadata from
5906 * framework
5907 *
5908 * PARAMETERS :
5909 * @request : request that needs to be serviced
5910 * @streamID : Stream ID of all the requested streams
5911 *
 5912 * RETURN : success: NO_ERROR
 5913 * failure: BAD_VALUE if any parameter could not be set
5914 *==========================================================================*/
setFrameParameters(camera3_capture_request_t * request,cam_stream_ID_t streamID,uint32_t snapshotStreamId)5915 int QCamera3HardwareInterface::setFrameParameters(
5916 camera3_capture_request_t *request,
5917 cam_stream_ID_t streamID,
5918 uint32_t snapshotStreamId)
5919 {
5920 /*translate from camera_metadata_t type to parm_type_t*/
5921 int rc = 0;
5922 int32_t hal_version = CAM_HAL_V3;
5923
5924 memset(mParameters, 0, sizeof(parm_buffer_t));
5925 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
5926 sizeof(hal_version), &hal_version);
5927 if (rc < 0) {
5928 ALOGE("%s: Failed to set hal version in the parameters", __func__);
5929 return BAD_VALUE;
5930 }
5931
5932 /*we need to update the frame number in the parameters*/
5933 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
5934 sizeof(request->frame_number), &(request->frame_number));
5935 if (rc < 0) {
5936 ALOGE("%s: Failed to set the frame number in the parameters", __func__);
5937 return BAD_VALUE;
5938 }
5939
5940 /* Update stream id of all the requested buffers */
5941 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_ID,
5942 sizeof(cam_stream_ID_t), &streamID);
5943
5944 if (rc < 0) {
5945 ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
5946 return BAD_VALUE;
5947 }
5948
5949 if(request->settings != NULL){
5950 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
5951 }
5952
5953 return rc;
5954 }
5955
5956 /*===========================================================================
5957 * FUNCTION : setReprocParameters
5958 *
5959 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
5960 * return it.
5961 *
5962 * PARAMETERS :
5963 * @request : request that needs to be serviced
5964 *
 5965 * RETURN : success: NO_ERROR
 5966 * failure: BAD_VALUE, or error code from metadata translation
5967 *==========================================================================*/
setReprocParameters(camera3_capture_request_t * request,metadata_buffer_t * reprocParam,uint32_t snapshotStreamId)5968 int32_t QCamera3HardwareInterface::setReprocParameters(
5969 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
5970 uint32_t snapshotStreamId)
5971 {
5972 /*translate from camera_metadata_t type to parm_type_t*/
5973 int rc = 0;
5974
5975 if (NULL == request->settings){
5976 ALOGE("%s: Reprocess settings cannot be NULL", __func__);
5977 return BAD_VALUE;
5978 }
5979
5980 if (NULL == reprocParam) {
5981 ALOGE("%s: Invalid reprocessing metadata buffer", __func__);
5982 return BAD_VALUE;
5983 }
5984 memset(reprocParam, 0, sizeof(metadata_buffer_t));
5985
5986 /*we need to update the frame number in the parameters*/
5987 rc = AddSetParmEntryToBatch(reprocParam, CAM_INTF_META_FRAME_NUMBER,
5988 sizeof(request->frame_number), &(request->frame_number));
5989 if (rc < 0) {
5990 ALOGE("%s: Failed to set the frame number in the parameters", __func__);
5991 return rc;
5992 }
5993
5994 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
5995 if (rc < 0) {
5996 ALOGE("%s: Failed to translate reproc request", __func__);
5997 return rc;
5998 }
5999
6000 CameraMetadata frame_settings;
6001 frame_settings = request->settings;
6002 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
6003 frame_settings.exists(QCAMERA3_CROP_REPROCESS) &&
6004 frame_settings.exists(QCAMERA3_CROP_STREAM_ID_REPROCESS)) {
6005 int32_t *crop_count =
6006 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
6007 int32_t *crop_data =
6008 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
6009 int32_t *crop_stream_ids =
6010 frame_settings.find(QCAMERA3_CROP_STREAM_ID_REPROCESS).data.i32;
6011 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
6012 bool found = false;
6013 int32_t i;
6014 for (i = 0; i < *crop_count; i++) {
6015 if (crop_stream_ids[i] == (int32_t) request->input_buffer->stream) {
6016 found = true;
6017 break;
6018 }
6019 }
6020
6021 if (found) {
6022 cam_crop_data_t crop_meta;
6023 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
6024 crop_meta.num_of_streams = 1;
6025 crop_meta.crop_info[0].crop.left = crop_data[i*4];
6026 crop_meta.crop_info[0].crop.top = crop_data[i*4 + 1];
6027 crop_meta.crop_info[0].crop.width = crop_data[i*4 + 2];
6028 crop_meta.crop_info[0].crop.height = crop_data[i*4 + 3];
6029 rc = AddSetParmEntryToBatch(reprocParam,
6030 CAM_INTF_META_CROP_DATA,
6031 sizeof(cam_crop_data_t), &crop_meta);
6032 CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d",
6033 __func__,
6034 request->input_buffer->stream,
6035 crop_meta.crop_info[0].crop.left,
6036 crop_meta.crop_info[0].crop.top,
6037 crop_meta.crop_info[0].crop.width,
6038 crop_meta.crop_info[0].crop.height);
6039 } else {
6040 ALOGE("%s: No matching reprocess input stream found!", __func__);
6041 }
6042 } else {
6043 ALOGE("%s: Invalid reprocess crop count %d!", __func__, *crop_count);
6044 }
6045 }
6046
6047 return rc;
6048 }
6049
6050 /*===========================================================================
6051 * FUNCTION : translateToHalMetadata
6052 *
6053 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
6054 *
6055 *
6056 * PARAMETERS :
6057 * @request : request sent from framework
6058 *
6059 *
 6060 * RETURN : success: NO_ERROR
 6061 * failure: error code returned while setting parameters
6062 *==========================================================================*/
translateToHalMetadata(const camera3_capture_request_t * request,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId)6063 int QCamera3HardwareInterface::translateToHalMetadata
6064 (const camera3_capture_request_t *request,
6065 metadata_buffer_t *hal_metadata,
6066 uint32_t snapshotStreamId)
6067 {
6068 int rc = 0;
6069 CameraMetadata frame_settings;
6070 frame_settings = request->settings;
6071
6072 /* Do not change the order of the following list unless you know what you are
6073 * doing.
6074 * The order is laid out in such a way that parameters in the front of the table
6075 * may be used to override the parameters later in the table. Examples are:
6076 * 1. META_MODE should precede AEC/AWB/AF MODE
6077 * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
6078 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
6079 * 4. Any mode should precede it's corresponding settings
6080 */
6081 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
6082 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
6083 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_MODE,
6084 sizeof(metaMode), &metaMode);
6085 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
6086 camera_metadata_entry entry = frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
6087 if (0 < entry.count) {
6088 uint8_t fwk_sceneMode = entry.data.u8[0];
6089 uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
6090 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
6091 fwk_sceneMode);
6092 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
6093 sizeof(sceneMode), &sceneMode);
6094 }
6095 } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
6096 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
6097 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
6098 sizeof(sceneMode), &sceneMode);
6099 } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
6100 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
6101 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
6102 sizeof(sceneMode), &sceneMode);
6103 }
6104 }
6105
6106 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
6107 uint8_t fwk_aeMode =
6108 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
6109 uint8_t aeMode;
6110 int32_t redeye;
6111
6112 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
6113 aeMode = CAM_AE_MODE_OFF;
6114 } else {
6115 aeMode = CAM_AE_MODE_ON;
6116 }
6117 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
6118 redeye = 1;
6119 } else {
6120 redeye = 0;
6121 }
6122
6123 int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
6124 sizeof(AE_FLASH_MODE_MAP),
6125 fwk_aeMode);
6126 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_AEC_MODE,
6127 sizeof(aeMode), &aeMode);
6128 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
6129 sizeof(flashMode), &flashMode);
6130 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION,
6131 sizeof(redeye), &redeye);
6132 }
6133
6134 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
6135 uint8_t fwk_whiteLevel =
6136 frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
6137 uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
6138 sizeof(WHITE_BALANCE_MODES_MAP),
6139 fwk_whiteLevel);
6140 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE,
6141 sizeof(whiteLevel), &whiteLevel);
6142 }
6143
6144 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
6145 uint8_t fwk_cacMode =
6146 frame_settings.find(
6147 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
6148 int8_t val = lookupHalName(COLOR_ABERRATION_MAP,
6149 sizeof(COLOR_ABERRATION_MAP)/sizeof(COLOR_ABERRATION_MAP[0]),
6150 fwk_cacMode);
6151 if (NAME_NOT_FOUND != val) {
6152 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
6153 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_CAC,
6154 sizeof(cacMode), &cacMode);
6155 } else {
6156 ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
6157 }
6158 }
6159
6160 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
6161 uint8_t fwk_focusMode =
6162 frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
6163 uint8_t focusMode;
6164 focusMode = lookupHalName(FOCUS_MODES_MAP,
6165 sizeof(FOCUS_MODES_MAP),
6166 fwk_focusMode);
6167 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_FOCUS_MODE,
6168 sizeof(focusMode), &focusMode);
6169 }
6170
6171 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
6172 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
6173 rc = AddSetParmEntryToBatch(hal_metadata,
6174 CAM_INTF_META_LENS_FOCUS_DISTANCE,
6175 sizeof(focalDistance), &focalDistance);
6176 }
6177
6178 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
6179 uint8_t fwk_antibandingMode =
6180 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
6181 int32_t hal_antibandingMode = lookupHalName(ANTIBANDING_MODES_MAP,
6182 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
6183 fwk_antibandingMode);
6184 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
6185 sizeof(hal_antibandingMode), &hal_antibandingMode);
6186 }
6187
6188 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
6189 int32_t expCompensation = frame_settings.find(
6190 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
6191 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
6192 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
6193 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
6194 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
6195 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
6196 sizeof(expCompensation), &expCompensation);
6197 }
6198
6199 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
6200 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
6201 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_AEC_LOCK,
6202 sizeof(aeLock), &aeLock);
6203 }
6204 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
6205 cam_fps_range_t fps_range;
6206 fps_range.min_fps =
6207 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
6208 fps_range.max_fps =
6209 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
6210 fps_range.video_min_fps = fps_range.min_fps;
6211 fps_range.video_max_fps = fps_range.max_fps;
6212 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_FPS_RANGE,
6213 sizeof(fps_range), &fps_range);
6214 }
6215
6216 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
6217 uint8_t awbLock =
6218 frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
6219 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_AWB_LOCK,
6220 sizeof(awbLock), &awbLock);
6221 }
6222
6223 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
6224 uint8_t fwk_effectMode =
6225 frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
6226 uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
6227 sizeof(EFFECT_MODES_MAP),
6228 fwk_effectMode);
6229 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_EFFECT,
6230 sizeof(effectMode), &effectMode);
6231 }
6232
6233 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
6234 uint8_t colorCorrectMode =
6235 frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
6236 rc =
6237 AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
6238 sizeof(colorCorrectMode), &colorCorrectMode);
6239 }
6240
6241 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
6242 cam_color_correct_gains_t colorCorrectGains;
6243 for (int i = 0; i < 4; i++) {
6244 colorCorrectGains.gains[i] =
6245 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
6246 }
6247 rc =
6248 AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
6249 sizeof(colorCorrectGains), &colorCorrectGains);
6250 }
6251
6252 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
6253 cam_color_correct_matrix_t colorCorrectTransform;
6254 cam_rational_type_t transform_elem;
6255 int num = 0;
6256 for (int i = 0; i < 3; i++) {
6257 for (int j = 0; j < 3; j++) {
6258 transform_elem.numerator =
6259 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
6260 transform_elem.denominator =
6261 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
6262 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
6263 num++;
6264 }
6265 }
6266 rc =
6267 AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
6268 sizeof(colorCorrectTransform), &colorCorrectTransform);
6269 }
6270
6271 cam_trigger_t aecTrigger;
6272 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
6273 aecTrigger.trigger_id = -1;
6274 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
6275 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
6276 aecTrigger.trigger =
6277 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
6278 aecTrigger.trigger_id =
6279 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
6280 rc = AddSetParmEntryToBatch(hal_metadata,
6281 CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
6282 sizeof(aecTrigger), &aecTrigger);
6283 }
6284 /*af_trigger must come with a trigger id*/
6285 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
6286 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
6287 cam_trigger_t af_trigger;
6288 af_trigger.trigger =
6289 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
6290 af_trigger.trigger_id =
6291 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
6292 rc = AddSetParmEntryToBatch(hal_metadata,
6293 CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
6294 }
6295
6296 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
6297 int32_t demosaic =
6298 frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
6299 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_DEMOSAIC,
6300 sizeof(demosaic), &demosaic);
6301 }
6302
6303 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
6304 cam_edge_application_t edge_application;
6305 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
6306 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
6307 edge_application.sharpness = 0;
6308 } else {
6309 if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
6310 uint8_t edgeStrength =
6311 frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
6312 edge_application.sharpness = (int32_t)edgeStrength;
6313 } else {
6314 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
6315 }
6316 }
6317 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_EDGE_MODE,
6318 sizeof(edge_application), &edge_application);
6319 }
6320
6321 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
6322 int32_t respectFlashMode = 1;
6323 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
6324 uint8_t fwk_aeMode =
6325 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
6326 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
6327 respectFlashMode = 0;
6328 CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
6329 __func__);
6330 }
6331 }
6332 if (respectFlashMode) {
6333 uint8_t flashMode =
6334 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
6335 flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
6336 sizeof(FLASH_MODES_MAP),
6337 flashMode);
6338 CDBG_HIGH("%s: flash mode after mapping %d", __func__, flashMode);
6339 // To check: CAM_INTF_META_FLASH_MODE usage
6340 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
6341 sizeof(flashMode), &flashMode);
6342 }
6343 }
6344
6345 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
6346 uint8_t flashPower =
6347 frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
6348 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_FLASH_POWER,
6349 sizeof(flashPower), &flashPower);
6350 }
6351
6352 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
6353 int64_t flashFiringTime =
6354 frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
6355 rc = AddSetParmEntryToBatch(hal_metadata,
6356 CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
6357 }
6358
6359 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
6360 uint8_t hotPixelMode =
6361 frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
6362 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
6363 sizeof(hotPixelMode), &hotPixelMode);
6364 }
6365
6366 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
6367 float lensAperture =
6368 frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
6369 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_LENS_APERTURE,
6370 sizeof(lensAperture), &lensAperture);
6371 }
6372
6373 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
6374 float filterDensity =
6375 frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
6376 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
6377 sizeof(filterDensity), &filterDensity);
6378 }
6379
6380 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
6381 float focalLength =
6382 frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
6383 rc = AddSetParmEntryToBatch(hal_metadata,
6384 CAM_INTF_META_LENS_FOCAL_LENGTH,
6385 sizeof(focalLength), &focalLength);
6386 }
6387
6388 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
6389 uint8_t optStabMode =
6390 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
6391 rc = AddSetParmEntryToBatch(hal_metadata,
6392 CAM_INTF_META_LENS_OPT_STAB_MODE,
6393 sizeof(optStabMode), &optStabMode);
6394 }
6395
6396 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
6397 uint8_t noiseRedMode =
6398 frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
6399 rc = AddSetParmEntryToBatch(hal_metadata,
6400 CAM_INTF_META_NOISE_REDUCTION_MODE,
6401 sizeof(noiseRedMode), &noiseRedMode);
6402 }
6403
6404 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
6405 uint8_t noiseRedStrength =
6406 frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
6407 rc = AddSetParmEntryToBatch(hal_metadata,
6408 CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
6409 sizeof(noiseRedStrength), &noiseRedStrength);
6410 }
6411
6412 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
6413 float reprocessEffectiveExposureFactor =
6414 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
6415 rc = AddSetParmEntryToBatch(hal_metadata,
6416 CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
6417 sizeof(reprocessEffectiveExposureFactor), &reprocessEffectiveExposureFactor);
6418 }
6419
6420 cam_crop_region_t scalerCropRegion;
6421 bool scalerCropSet = false;
6422 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
6423 scalerCropRegion.left =
6424 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
6425 scalerCropRegion.top =
6426 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
6427 scalerCropRegion.width =
6428 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
6429 scalerCropRegion.height =
6430 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
6431 rc = AddSetParmEntryToBatch(hal_metadata,
6432 CAM_INTF_META_SCALER_CROP_REGION,
6433 sizeof(scalerCropRegion), &scalerCropRegion);
6434 scalerCropSet = true;
6435 }
6436
6437 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
6438 int64_t sensorExpTime =
6439 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
6440 CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
6441 rc = AddSetParmEntryToBatch(hal_metadata,
6442 CAM_INTF_META_SENSOR_EXPOSURE_TIME,
6443 sizeof(sensorExpTime), &sensorExpTime);
6444 }
6445
6446 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
6447 int64_t sensorFrameDuration =
6448 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
6449 int64_t minFrameDuration = getMinFrameDuration(request);
6450 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
6451 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
6452 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
6453 CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
6454 rc = AddSetParmEntryToBatch(hal_metadata,
6455 CAM_INTF_META_SENSOR_FRAME_DURATION,
6456 sizeof(sensorFrameDuration), &sensorFrameDuration);
6457 }
6458
6459 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
6460 int32_t sensorSensitivity =
6461 frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
6462 if (sensorSensitivity <
6463 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
6464 sensorSensitivity =
6465 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
6466 if (sensorSensitivity >
6467 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
6468 sensorSensitivity =
6469 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
6470 CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
6471 rc = AddSetParmEntryToBatch(hal_metadata,
6472 CAM_INTF_META_SENSOR_SENSITIVITY,
6473 sizeof(sensorSensitivity), &sensorSensitivity);
6474 }
6475
6476 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
6477 uint8_t shadingMode =
6478 frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
6479 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_MODE,
6480 sizeof(shadingMode), &shadingMode);
6481 }
6482
6483 if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
6484 uint8_t shadingStrength =
6485 frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
6486 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_STRENGTH,
6487 sizeof(shadingStrength), &shadingStrength);
6488 }
6489
6490 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
6491 uint8_t fwk_facedetectMode =
6492 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
6493 uint8_t facedetectMode =
6494 lookupHalName(FACEDETECT_MODES_MAP,
6495 sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
6496 rc = AddSetParmEntryToBatch(hal_metadata,
6497 CAM_INTF_META_STATS_FACEDETECT_MODE,
6498 sizeof(facedetectMode), &facedetectMode);
6499 }
6500
6501 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
6502 uint8_t histogramMode =
6503 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
6504 rc = AddSetParmEntryToBatch(hal_metadata,
6505 CAM_INTF_META_STATS_HISTOGRAM_MODE,
6506 sizeof(histogramMode), &histogramMode);
6507 }
6508
6509 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
6510 uint8_t sharpnessMapMode =
6511 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
6512 rc = AddSetParmEntryToBatch(hal_metadata,
6513 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
6514 sizeof(sharpnessMapMode), &sharpnessMapMode);
6515 }
6516
6517 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
6518 uint8_t tonemapMode =
6519 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
6520 rc = AddSetParmEntryToBatch(hal_metadata,
6521 CAM_INTF_META_TONEMAP_MODE,
6522 sizeof(tonemapMode), &tonemapMode);
6523 }
6524 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
6525 /*All tonemap channels will have the same number of points*/
6526 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
6527 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
6528 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
6529 cam_rgb_tonemap_curves tonemapCurves;
6530 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
6531 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6532 ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
6533 __func__, tonemapCurves.tonemap_points_cnt,
6534 CAM_MAX_TONEMAP_CURVE_SIZE);
6535 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6536 }
6537
6538 /* ch0 = G*/
6539 int point = 0;
6540 cam_tonemap_curve_t tonemapCurveGreen;
6541 for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
6542 for (int j = 0; j < 2; j++) {
6543 tonemapCurveGreen.tonemap_points[i][j] =
6544 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
6545 point++;
6546 }
6547 }
6548 tonemapCurves.curves[0] = tonemapCurveGreen;
6549
6550 /* ch 1 = B */
6551 point = 0;
6552 cam_tonemap_curve_t tonemapCurveBlue;
6553 for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
6554 for (int j = 0; j < 2; j++) {
6555 tonemapCurveBlue.tonemap_points[i][j] =
6556 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
6557 point++;
6558 }
6559 }
6560 tonemapCurves.curves[1] = tonemapCurveBlue;
6561
6562 /* ch 2 = R */
6563 point = 0;
6564 cam_tonemap_curve_t tonemapCurveRed;
6565 for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
6566 for (int j = 0; j < 2; j++) {
6567 tonemapCurveRed.tonemap_points[i][j] =
6568 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
6569 point++;
6570 }
6571 }
6572 tonemapCurves.curves[2] = tonemapCurveRed;
6573
6574 rc = AddSetParmEntryToBatch(hal_metadata,
6575 CAM_INTF_META_TONEMAP_CURVES,
6576 sizeof(tonemapCurves), &tonemapCurves);
6577 }
6578
6579 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
6580 uint8_t captureIntent =
6581 frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
6582 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
6583 sizeof(captureIntent), &captureIntent);
6584 }
6585
6586 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
6587 uint8_t blackLevelLock =
6588 frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
6589 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
6590 sizeof(blackLevelLock), &blackLevelLock);
6591 }
6592
6593 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
6594 uint8_t lensShadingMapMode =
6595 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
6596 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
6597 sizeof(lensShadingMapMode), &lensShadingMapMode);
6598 }
6599
6600 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
6601 cam_area_t roi;
6602 bool reset = true;
6603 convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
6604 if (scalerCropSet) {
6605 reset = resetIfNeededROI(&roi, &scalerCropRegion);
6606 }
6607 if (reset) {
6608 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_AEC_ROI,
6609 sizeof(roi), &roi);
6610 }
6611 }
6612
6613 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
6614 cam_area_t roi;
6615 bool reset = true;
6616 convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
6617 if (scalerCropSet) {
6618 reset = resetIfNeededROI(&roi, &scalerCropRegion);
6619 }
6620 if (reset) {
6621 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_AF_ROI,
6622 sizeof(roi), &roi);
6623 }
6624 }
6625
6626 // CDS
6627 if (frame_settings.exists(QCAMERA3_CDS_MODE)) {
6628 int32_t* cds =
6629 frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
6630 if ((CAM_CDS_MODE_MAX <= (*cds)) || (0 > (*cds))) {
6631 ALOGE("%s: Invalid CDS mode %d!", __func__, *cds);
6632 } else {
6633 cam_cds_mode_type_t mode = (cam_cds_mode_type_t) *cds;
6634 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_CDS_MODE,
6635 sizeof(mode), &mode);
6636 }
6637 }
6638
6639 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
6640 cam_test_pattern_data_t testPatternData;
6641 uint32_t fwk_testPatternMode = frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
6642 uint8_t testPatternMode = lookupHalName(TEST_PATTERN_MAP,
6643 sizeof(TEST_PATTERN_MAP), fwk_testPatternMode);
6644
6645 memset(&testPatternData, 0, sizeof(testPatternData));
6646 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
6647 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
6648 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
6649 int32_t* fwk_testPatternData = frame_settings.find(
6650 ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
6651 testPatternData.r = fwk_testPatternData[0];
6652 testPatternData.b = fwk_testPatternData[3];
6653 switch (gCamCapability[mCameraId]->color_arrangement) {
6654 case CAM_FILTER_ARRANGEMENT_RGGB:
6655 case CAM_FILTER_ARRANGEMENT_GRBG:
6656 testPatternData.gr = fwk_testPatternData[1];
6657 testPatternData.gb = fwk_testPatternData[2];
6658 break;
6659 case CAM_FILTER_ARRANGEMENT_GBRG:
6660 case CAM_FILTER_ARRANGEMENT_BGGR:
6661 testPatternData.gr = fwk_testPatternData[2];
6662 testPatternData.gb = fwk_testPatternData[1];
6663 break;
6664 default:
6665 ALOGE("%s: color arrangement %d is not supported", __func__,
6666 gCamCapability[mCameraId]->color_arrangement);
6667 break;
6668 }
6669 }
6670 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
6671 sizeof(testPatternData), &testPatternData);
6672 }
6673
6674 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
6675 double *gps_coords =
6676 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d;
6677 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES, sizeof(double)*3, gps_coords);
6678 }
6679
6680 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
6681 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
6682 const char *gps_methods_src = (const char *)
6683 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
6684 memset(gps_methods, '\0', sizeof(gps_methods));
6685 strncpy(gps_methods, gps_methods_src, sizeof(gps_methods)-1);
6686 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS, sizeof(gps_methods), gps_methods);
6687 }
6688
6689 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
6690 int64_t gps_timestamp =
6691 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
6692 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP, sizeof(int64_t), &gps_timestamp);
6693 }
6694
6695 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
6696 int32_t orientation =
6697 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
6698 cam_rotation_info_t rotation_info;
6699 if (orientation == 0) {
6700 rotation_info.rotation = ROTATE_0;
6701 } else if (orientation == 90) {
6702 rotation_info.rotation = ROTATE_90;
6703 } else if (orientation == 180) {
6704 rotation_info.rotation = ROTATE_180;
6705 } else if (orientation == 270) {
6706 rotation_info.rotation = ROTATE_270;
6707 }
6708 rotation_info.streamId = snapshotStreamId;
6709 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, sizeof(orientation), &orientation);
6710 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_ROTATION, sizeof(rotation_info), &rotation_info);
6711 }
6712
6713 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
6714 int8_t quality =
6715 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
6716 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_QUALITY, sizeof(quality), &quality);
6717 }
6718
6719 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
6720 int8_t thumb_quality =
6721 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
6722 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY, sizeof(thumb_quality), &thumb_quality);
6723 }
6724
6725 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
6726 cam_dimension_t dim;
6727 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
6728 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
6729 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, sizeof(dim), &dim);
6730 }
6731
6732 // Internal metadata
6733 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
6734 int32_t* privatedata =
6735 frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS).data.i32;
6736 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
6737 sizeof(int32_t) * MAX_METADATA_PRIVATE_PAYLOAD_SIZE, privatedata);
6738 }
6739
6740 // EV step
6741 rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_EV_STEP,
6742 sizeof(cam_rational_type_t), &(gCamCapability[mCameraId]->exp_compensation_step));
6743
6744 return rc;
6745 }
6746
6747 /*===========================================================================
6748 * FUNCTION : captureResultCb
6749 *
6750 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
6751 *
6752 * PARAMETERS :
6753 * @frame : frame information from mm-camera-interface
6754 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
6755 * @userdata: userdata
6756 *
6757 * RETURN : NONE
6758 *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata,camera3_stream_buffer_t * buffer,uint32_t frame_number,void * userdata)6759 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
6760 camera3_stream_buffer_t *buffer,
6761 uint32_t frame_number, void *userdata)
6762 {
6763 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
6764 if (hw == NULL) {
6765 ALOGE("%s: Invalid hw %p", __func__, hw);
6766 return;
6767 }
6768
6769 hw->captureResultCb(metadata, buffer, frame_number);
6770 return;
6771 }
6772
6773
6774 /*===========================================================================
6775 * FUNCTION : initialize
6776 *
6777 * DESCRIPTION: Pass framework callback pointers to HAL
6778 *
6779 * PARAMETERS :
6780 *
6781 *
6782 * RETURN : Success : 0
6783 * Failure: -ENODEV
6784 *==========================================================================*/
6785
initialize(const struct camera3_device * device,const camera3_callback_ops_t * callback_ops)6786 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
6787 const camera3_callback_ops_t *callback_ops)
6788 {
6789 CDBG("%s: E", __func__);
6790 QCamera3HardwareInterface *hw =
6791 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
6792 if (!hw) {
6793 ALOGE("%s: NULL camera device", __func__);
6794 return -ENODEV;
6795 }
6796
6797 int rc = hw->initialize(callback_ops);
6798 CDBG("%s: X", __func__);
6799 return rc;
6800 }
6801
6802 /*===========================================================================
6803 * FUNCTION : configure_streams
6804 *
6805 * DESCRIPTION:
6806 *
6807 * PARAMETERS :
6808 *
6809 *
6810 * RETURN : Success: 0
6811 * Failure: -EINVAL (if stream configuration is invalid)
6812 * -ENODEV (fatal error)
6813 *==========================================================================*/
6814
configure_streams(const struct camera3_device * device,camera3_stream_configuration_t * stream_list)6815 int QCamera3HardwareInterface::configure_streams(
6816 const struct camera3_device *device,
6817 camera3_stream_configuration_t *stream_list)
6818 {
6819 CDBG("%s: E", __func__);
6820 QCamera3HardwareInterface *hw =
6821 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
6822 if (!hw) {
6823 ALOGE("%s: NULL camera device", __func__);
6824 return -ENODEV;
6825 }
6826 int rc = hw->configureStreams(stream_list);
6827 CDBG("%s: X", __func__);
6828 return rc;
6829 }
6830
6831 /*===========================================================================
6832 * FUNCTION : construct_default_request_settings
6833 *
6834 * DESCRIPTION: Configure a settings buffer to meet the required use case
6835 *
6836 * PARAMETERS :
6837 *
6838 *
6839 * RETURN : Success: Return valid metadata
6840 * Failure: Return NULL
6841 *==========================================================================*/
6842 const camera_metadata_t* QCamera3HardwareInterface::
construct_default_request_settings(const struct camera3_device * device,int type)6843 construct_default_request_settings(const struct camera3_device *device,
6844 int type)
6845 {
6846
6847 CDBG("%s: E", __func__);
6848 camera_metadata_t* fwk_metadata = NULL;
6849 QCamera3HardwareInterface *hw =
6850 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
6851 if (!hw) {
6852 ALOGE("%s: NULL camera device", __func__);
6853 return NULL;
6854 }
6855
6856 fwk_metadata = hw->translateCapabilityToMetadata(type);
6857
6858 CDBG("%s: X", __func__);
6859 return fwk_metadata;
6860 }
6861
6862 /*===========================================================================
6863 * FUNCTION : process_capture_request
6864 *
6865 * DESCRIPTION:
6866 *
6867 * PARAMETERS :
6868 *
6869 *
6870 * RETURN :
6871 *==========================================================================*/
process_capture_request(const struct camera3_device * device,camera3_capture_request_t * request)6872 int QCamera3HardwareInterface::process_capture_request(
6873 const struct camera3_device *device,
6874 camera3_capture_request_t *request)
6875 {
6876 CDBG("%s: E", __func__);
6877 QCamera3HardwareInterface *hw =
6878 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
6879 if (!hw) {
6880 ALOGE("%s: NULL camera device", __func__);
6881 return -EINVAL;
6882 }
6883
6884 int rc = hw->processCaptureRequest(request);
6885 CDBG("%s: X", __func__);
6886 return rc;
6887 }
6888
6889 /*===========================================================================
6890 * FUNCTION : dump
6891 *
6892 * DESCRIPTION:
6893 *
6894 * PARAMETERS :
6895 *
6896 *
6897 * RETURN :
6898 *==========================================================================*/
6899
dump(const struct camera3_device * device,int fd)6900 void QCamera3HardwareInterface::dump(
6901 const struct camera3_device *device, int fd)
6902 {
6903 /* Log level property is read when "adb shell dumpsys media.camera" is
6904 called so that the log level can be controlled without restarting
6905 the media server */
6906 getLogLevel();
6907
6908 CDBG("%s: E", __func__);
6909 QCamera3HardwareInterface *hw =
6910 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
6911 if (!hw) {
6912 ALOGE("%s: NULL camera device", __func__);
6913 return;
6914 }
6915
6916 hw->dump(fd);
6917 CDBG("%s: X", __func__);
6918 return;
6919 }
6920
6921 /*===========================================================================
6922 * FUNCTION : flush
6923 *
6924 * DESCRIPTION:
6925 *
6926 * PARAMETERS :
6927 *
6928 *
6929 * RETURN :
6930 *==========================================================================*/
6931
flush(const struct camera3_device * device)6932 int QCamera3HardwareInterface::flush(
6933 const struct camera3_device *device)
6934 {
6935 int rc;
6936 CDBG("%s: E", __func__);
6937 QCamera3HardwareInterface *hw =
6938 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
6939 if (!hw) {
6940 ALOGE("%s: NULL camera device", __func__);
6941 return -EINVAL;
6942 }
6943
6944 rc = hw->flush();
6945 CDBG("%s: X", __func__);
6946 return rc;
6947 }
6948
6949 /*===========================================================================
6950 * FUNCTION : close_camera_device
6951 *
6952 * DESCRIPTION:
6953 *
6954 * PARAMETERS :
6955 *
6956 *
6957 * RETURN :
6958 *==========================================================================*/
close_camera_device(struct hw_device_t * device)6959 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
6960 {
6961 CDBG("%s: E", __func__);
6962 int ret = NO_ERROR;
6963 QCamera3HardwareInterface *hw =
6964 reinterpret_cast<QCamera3HardwareInterface *>(
6965 reinterpret_cast<camera3_device_t *>(device)->priv);
6966 if (!hw) {
6967 ALOGE("NULL camera device");
6968 return BAD_VALUE;
6969 }
6970 delete hw;
6971
6972 CDBG("%s: X", __func__);
6973 return ret;
6974 }
6975
6976 /*===========================================================================
6977 * FUNCTION : getWaveletDenoiseProcessPlate
6978 *
6979 * DESCRIPTION: query wavelet denoise process plate
6980 *
6981 * PARAMETERS : None
6982 *
 * RETURN     : WNR process plate value
6984 *==========================================================================*/
getWaveletDenoiseProcessPlate()6985 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
6986 {
6987 char prop[PROPERTY_VALUE_MAX];
6988 memset(prop, 0, sizeof(prop));
6989 property_get("persist.denoise.process.plates", prop, "0");
6990 int processPlate = atoi(prop);
6991 switch(processPlate) {
6992 case 0:
6993 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
6994 case 1:
6995 return CAM_WAVELET_DENOISE_CBCR_ONLY;
6996 case 2:
6997 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
6998 case 3:
6999 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
7000 default:
7001 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
7002 }
7003 }
7004
7005 /*===========================================================================
7006 * FUNCTION : needRotationReprocess
7007 *
7008 * DESCRIPTION: if rotation needs to be done by reprocess in pp
7009 *
7010 * PARAMETERS : none
7011 *
7012 * RETURN : true: needed
7013 * false: no need
7014 *==========================================================================*/
needRotationReprocess()7015 bool QCamera3HardwareInterface::needRotationReprocess()
7016 {
7017 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
7018 // current rotation is not zero, and pp has the capability to process rotation
7019 CDBG_HIGH("%s: need do reprocess for rotation", __func__);
7020 return true;
7021 }
7022
7023 return false;
7024 }
7025
7026 /*===========================================================================
7027 * FUNCTION : needReprocess
7028 *
 * DESCRIPTION: if reprocess is needed
7030 *
7031 * PARAMETERS : none
7032 *
7033 * RETURN : true: needed
7034 * false: no need
7035 *==========================================================================*/
needReprocess(uint32_t postprocess_mask)7036 bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
7037 {
7038 if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
7039 // TODO: add for ZSL HDR later
7040 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
7041 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
7042 CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
7043 return true;
7044 } else {
7045 CDBG_HIGH("%s: already post processed frame", __func__);
7046 return false;
7047 }
7048 }
7049 return needRotationReprocess();
7050 }
7051
7052 /*===========================================================================
7053 * FUNCTION : needJpegRotation
7054 *
7055 * DESCRIPTION: if rotation from jpeg is needed
7056 *
7057 * PARAMETERS : none
7058 *
7059 * RETURN : true: needed
7060 * false: no need
7061 *==========================================================================*/
needJpegRotation()7062 bool QCamera3HardwareInterface::needJpegRotation()
7063 {
7064 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
7065 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
7066 CDBG("%s: Need Jpeg to do the rotation", __func__);
7067 return true;
7068 }
7069 return false;
7070 }
7071
7072 /*===========================================================================
7073 * FUNCTION : addOfflineReprocChannel
7074 *
7075 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
7076 * coming from input channel
7077 *
7078 * PARAMETERS :
7079 * @config : reprocess configuration
7080 *
7081 *
7082 * RETURN : Ptr to the newly created channel obj. NULL if failed.
7083 *==========================================================================*/
addOfflineReprocChannel(const reprocess_config_t & config,QCamera3PicChannel * picChHandle,metadata_buffer_t * metadata)7084 QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
7085 const reprocess_config_t &config, QCamera3PicChannel *picChHandle,
7086 metadata_buffer_t *metadata)
7087 {
7088 int32_t rc = NO_ERROR;
7089 QCamera3ReprocessChannel *pChannel = NULL;
7090
7091 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
7092 mCameraHandle->ops, NULL, config.padding, CAM_QCOM_FEATURE_NONE, this, picChHandle);
7093 if (NULL == pChannel) {
7094 ALOGE("%s: no mem for reprocess channel", __func__);
7095 return NULL;
7096 }
7097
7098 rc = pChannel->initialize(IS_TYPE_NONE);
7099 if (rc != NO_ERROR) {
7100 ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
7101 delete pChannel;
7102 return NULL;
7103 }
7104
7105 // pp feature config
7106 cam_pp_feature_config_t pp_config;
7107 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
7108
7109 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET;
7110
7111 rc = pChannel->addReprocStreamsFromSource(pp_config,
7112 config,
7113 IS_TYPE_NONE,
7114 mMetadataChannel);
7115
7116 if (rc != NO_ERROR) {
7117 delete pChannel;
7118 return NULL;
7119 }
7120 return pChannel;
7121 }
7122
7123
isCACEnabled()7124 bool QCamera3HardwareInterface::isCACEnabled() {
7125 char prop[PROPERTY_VALUE_MAX];
7126 memset(prop, 0, sizeof(prop));
7127 property_get("persist.camera.feature.cac", prop, "0");
7128 int enableCAC = atoi(prop);
7129 return enableCAC;
7130 }
7131 /*===========================================================================
7132 * FUNCTION : getLogLevel
7133 *
7134 * DESCRIPTION: Reads the log level property into a variable
7135 *
7136 * PARAMETERS :
7137 * None
7138 *
7139 * RETURN :
7140 * None
7141 *==========================================================================*/
getLogLevel()7142 void QCamera3HardwareInterface::getLogLevel()
7143 {
7144 char prop[PROPERTY_VALUE_MAX];
7145
7146 property_get("persist.camera.logs", prop, "0");
7147 gCamHal3LogLevel = atoi(prop);
7148
7149 return;
7150 }
7151
7152
7153 /*===========================================================================
7154 * FUNCTION : getFlashInfo
7155 *
7156 * DESCRIPTION: Retrieve information about whether the device has a flash.
7157 *
7158 * PARAMETERS :
7159 * @cameraId : Camera id to query
7160 * @hasFlash : Boolean indicating whether there is a flash device
7161 * associated with given camera
7162 * @flashNode : If a flash device exists, this will be its device node.
7163 *
7164 * RETURN :
7165 * None
7166 *==========================================================================*/
getFlashInfo(const int cameraId,bool & hasFlash,char (& flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])7167 void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
7168 bool& hasFlash,
7169 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
7170 {
7171 cam_capability_t* camCapability = gCamCapability[cameraId];
7172 if (NULL == camCapability) {
7173 hasFlash = false;
7174 flashNode[0] = '\0';
7175 } else {
7176 hasFlash = camCapability->flash_available;
7177 strlcpy(flashNode,
7178 (char*)camCapability->flash_dev_name,
7179 QCAMERA_MAX_FILEPATH_LENGTH);
7180 }
7181 }
7182 }; //end namespace qcamera
7183