/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 * * Redistributions of source code must retain the above copyright
7 * notice, this list of conditions and the following disclaimer.
8 * * Redistributions in binary form must reproduce the above
9 * copyright notice, this list of conditions and the following
10 * disclaimer in the documentation and/or other materials provided
11 * with the distribution.
12 * * Neither the name of The Linux Foundation nor the names of its
13 * contributors may be used to endorse or promote products derived
14 * from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29
30 #define LOG_TAG "QCamera3HWI"
31 //#define LOG_NDEBUG 0
32
33 #define __STDC_LIMIT_MACROS
34 #include <cutils/properties.h>
35 #include <hardware/camera3.h>
36 #include <camera/CameraMetadata.h>
37 #include <stdlib.h>
38 #include <fcntl.h>
39 #include <stdint.h>
40 #include <utils/Log.h>
41 #include <utils/Errors.h>
42 #include <ui/Fence.h>
43 #include <gralloc_priv.h>
44 #include "QCamera3HWI.h"
45 #include "QCamera3Mem.h"
46 #include "QCamera3Channel.h"
47 #include "QCamera3PostProc.h"
48 #include "QCamera3VendorTags.h"
49
50 using namespace android;
51
52 namespace qcamera {
53
// Classic max macro; arguments are parenthesized but may be evaluated twice.
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Shorthand for fetching the CPU-mapped pointer of buffer INDEX from a
// QCamera3Memory-style object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Number of frames of result delay reported for an "empty" pipeline.
#define EMPTY_PIPELINE_DELAY 2

// Per-sensor capability / static-metadata tables, indexed by camera id.
// NOTE(review): assumed to be populated during HAL module init before any
// QCamera3HardwareInterface is constructed -- confirm against module code.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Serializes session open/close; mCameraSessionActive enforces the
// single-active-session policy checked in openCamera().
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
66
// Android ANDROID_CONTROL_EFFECT_MODE_* values -> HAL cam_effect_mode_type.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Android AWB modes -> HAL white-balance modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Android scene modes -> HAL scene modes. FACE_PRIORITY intentionally maps
// to CAM_SCENE_MODE_OFF (face priority is handled separately, not as a
// backend scene mode).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_OFF },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Android AF modes -> HAL focus modes. AF_MODE_OFF appears twice (OFF and
// FIXED): when translating HAL->Android, the first matching entry wins, so
// ordering here is significant.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Android AE antibanding modes -> HAL antibanding modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Android AE modes -> HAL flash behavior driven by AE. Both plain ON and
// OFF map to flash OFF; only the *_FLASH AE modes engage the flash.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Android explicit flash modes -> HAL flash modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Android face-detect modes -> HAL face-detect modes (SIMPLE not supported).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,  CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

// Android focus-distance calibration levels -> HAL calibration levels.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// Supported JPEG thumbnail sizes as flat (width, height) pairs; the leading
// (0, 0) entry means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};

// Android sensor test-pattern modes -> HAL test-pattern modes.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,                     CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,             CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,              CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,                     CAM_TEST_PATTERN_PN9 },
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};
194
/* Custom tag definitions */

// Framework-facing camera3_device_ops vtable (GNU designated-initializer
// syntax). register_stream_buffers and get_metadata_vendor_tag_ops are NULL:
// neither is used by the HALv3.2 API this device advertises.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            NULL,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        NULL,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};

// Maximum number of capture requests allowed in flight simultaneously.
int QCamera3HardwareInterface::kMaxInFlight = 5;
210
211 /*===========================================================================
212 * FUNCTION : QCamera3HardwareInterface
213 *
214 * DESCRIPTION: constructor of QCamera3HardwareInterface
215 *
216 * PARAMETERS :
217 * @cameraId : camera ID
218 *
219 * RETURN : none
220 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mFirstRequest(false),
      mRepeatingRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      mLoopBackResult(NULL),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL),
      mHdrHint(false),
      mMetaFrameCount(0),
      mCallbacks(callbacks)
{
    // Wire up the hw_device_t header the framework interacts with; priv
    // points back to this instance so the static ops can recover it.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_2;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): gCamCapability[cameraId] is dereferenced unconditionally;
    // assumes the capability table was populated before construction -- confirm.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start with all empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power-hint module is optional; failure to load it is non-fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
271
272 /*===========================================================================
273 * FUNCTION : ~QCamera3HardwareInterface
274 *
275 * DESCRIPTION: destructor of QCamera3HardwareInterface
276 *
277 * PARAMETERS : none
278 *
279 * RETURN : none
280 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    ALOGV("%s: E", __func__);
    /* We need to stop all streams before deleting any stream */

    // NOTE: 'camera3_stream_t *' objects are already freed at
    // this stage by the framework

    // Pass 1: stop every channel before any deletion, so no channel is
    // still streaming while a sibling is being torn down.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    // Pass 2: delete the channels and free the stream_info records
    // (allocated with malloc in configureStreams).
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    // mPictureChannel aliases one of the channels deleted in the loop above,
    // so only the pointer is cleared here (presumably -- no separate delete).
    mPictureChannel = NULL;

    /* Clean up all channels */
    if (mCameraInitialized) {
        if (mMetadataChannel) {
            mMetadataChannel->stop();
            delete mMetadataChannel;
            mMetadataChannel = NULL;
        }
        deinitParameters();
    }

    if (mCameraOpened)
        closeCamera();

    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingRequestsList.clear();

    // Release any lazily-built default request templates.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    ALOGV("%s: X", __func__);
}
337
338 /*===========================================================================
339 * FUNCTION : openCamera
340 *
341 * DESCRIPTION: open camera
342 *
343 * PARAMETERS :
344 * @hw_device : double ptr for camera device struct
345 *
346 * RETURN : int32_t type of status
347 * NO_ERROR -- success
348 * none-zero failure code
349 *==========================================================================*/
openCamera(struct hw_device_t ** hw_device)350 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
351 {
352 int rc = 0;
353 pthread_mutex_lock(&mCameraSessionLock);
354 if (mCameraSessionActive) {
355 ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
356 pthread_mutex_unlock(&mCameraSessionLock);
357 return -EUSERS;
358 }
359
360 if (mCameraOpened) {
361 *hw_device = NULL;
362 return PERMISSION_DENIED;
363 }
364
365 rc = openCamera();
366 if (rc == 0) {
367 *hw_device = &mCameraDevice.common;
368 mCameraSessionActive = 1;
369 } else
370 *hw_device = NULL;
371
372 #ifdef HAS_MULTIMEDIA_HINTS
373 if (rc == 0) {
374 if (m_pPowerModule) {
375 if (m_pPowerModule->powerHint) {
376 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
377 (void *)"state=1");
378 }
379 }
380 }
381 #endif
382 pthread_mutex_unlock(&mCameraSessionLock);
383 return rc;
384 }
385
386 /*===========================================================================
387 * FUNCTION : openCamera
388 *
389 * DESCRIPTION: open camera
390 *
391 * PARAMETERS : none
392 *
393 * RETURN : int32_t type of status
394 * NO_ERROR -- success
395 * none-zero failure code
396 *==========================================================================*/
openCamera()397 int QCamera3HardwareInterface::openCamera()
398 {
399 if (mCameraHandle) {
400 ALOGE("Failure: Camera already opened");
401 return ALREADY_EXISTS;
402 }
403 mCameraHandle = camera_open(mCameraId);
404 if (!mCameraHandle) {
405 ALOGE("camera_open failed.");
406 return UNKNOWN_ERROR;
407 }
408
409 mCameraOpened = true;
410
411 return NO_ERROR;
412 }
413
414 /*===========================================================================
415 * FUNCTION : closeCamera
416 *
417 * DESCRIPTION: close camera
418 *
419 * PARAMETERS : none
420 *
421 * RETURN : int32_t type of status
422 * NO_ERROR -- success
423 * none-zero failure code
424 *==========================================================================*/
closeCamera()425 int QCamera3HardwareInterface::closeCamera()
426 {
427 int rc = NO_ERROR;
428
429 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
430 mCameraHandle = NULL;
431 mCameraOpened = false;
432
433 #ifdef HAS_MULTIMEDIA_HINTS
434 if (rc == NO_ERROR) {
435 if (m_pPowerModule) {
436 if (m_pPowerModule->powerHint) {
437 if(mHdrHint == true) {
438 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
439 (void *)"state=3");
440 mHdrHint = false;
441 }
442 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
443 (void *)"state=0");
444 }
445 }
446 }
447 #endif
448
449 return rc;
450 }
451
452 /*===========================================================================
453 * FUNCTION : initialize
454 *
455 * DESCRIPTION: Initialize frameworks callback functions
456 *
457 * PARAMETERS :
458 * @callback_ops : callback function to frameworks
459 *
460 * RETURN :
461 *
462 *==========================================================================*/
initialize(const struct camera3_callback_ops * callback_ops)463 int QCamera3HardwareInterface::initialize(
464 const struct camera3_callback_ops *callback_ops)
465 {
466 int rc;
467
468 pthread_mutex_lock(&mMutex);
469
470 rc = initParameters();
471 if (rc < 0) {
472 ALOGE("%s: initParamters failed %d", __func__, rc);
473 goto err1;
474 }
475 mCallbackOps = callback_ops;
476
477 pthread_mutex_unlock(&mMutex);
478 mCameraInitialized = true;
479 return 0;
480
481 err1:
482 pthread_mutex_unlock(&mMutex);
483 return rc;
484 }
485
486 /*===========================================================================
487 * FUNCTION : validateStreamDimensions
488 *
489 * DESCRIPTION: Check if the configuration requested are those advertised
490 *
491 * PARAMETERS :
492 * @stream_list : streams to be configured
493 *
494 * RETURN :
495 *
496 *==========================================================================*/
validateStreamDimensions(camera3_stream_configuration_t * streamList)497 int QCamera3HardwareInterface::validateStreamDimensions(
498 camera3_stream_configuration_t *streamList)
499 {
500 int rc = NO_ERROR;
501 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
502
503 /*
504 * Loop through all streams requested in configuration
505 * Check if unsupported sizes have been requested on any of them
506 */
507 for (size_t j = 0; j < streamList->num_streams; j++){
508 bool sizeFound = false;
509 camera3_stream_t *newStream = streamList->streams[j];
510
511 /*
512 * Sizes are different for each type of stream format check against
513 * appropriate table.
514 */
515 switch (newStream->format) {
516 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
517 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
518 case HAL_PIXEL_FORMAT_RAW10:
519 for (int i = 0;
520 i < gCamCapability[mCameraId]->supported_raw_dim_cnt; i++){
521 if (gCamCapability[mCameraId]->raw_dim[i].width
522 == (int32_t) newStream->width
523 && gCamCapability[mCameraId]->raw_dim[i].height
524 == (int32_t) newStream->height) {
525 sizeFound = true;
526 }
527 }
528 break;
529 case HAL_PIXEL_FORMAT_BLOB:
530 for (int i = 0;
531 i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt;i++){
532 if ((int32_t)(newStream->width) ==
533 gCamCapability[mCameraId]
534 ->picture_sizes_tbl[i].width
535 && (int32_t)(newStream->height) ==
536 gCamCapability[mCameraId]
537 ->picture_sizes_tbl[i].height){
538 sizeFound = true;
539 break;
540 }
541 }
542 break;
543
544 case HAL_PIXEL_FORMAT_YCbCr_420_888:
545 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
546 default:
547 /* ZSL stream will be full active array size validate that*/
548 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
549 if ((int32_t)(newStream->width) ==
550 gCamCapability[mCameraId]->active_array_size.width
551 && (int32_t)(newStream->height) ==
552 gCamCapability[mCameraId]->active_array_size.height) {
553 sizeFound = true;
554 }
555 /* We could potentially break here to enforce ZSL stream
556 * set from frameworks always has full active array size
557 * but it is not clear from spec if framework will always
558 * follow that, also we have logic to override to full array
559 * size, so keeping this logic lenient at the moment.
560 */
561 }
562
563 /* Non ZSL stream still need to conform to advertised sizes*/
564 for (int i = 0;
565 i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt;i++){
566 if ((int32_t)(newStream->width) ==
567 gCamCapability[mCameraId]
568 ->picture_sizes_tbl[i].width
569 && (int32_t)(newStream->height) ==
570 gCamCapability[mCameraId]
571 ->picture_sizes_tbl[i].height){
572 sizeFound = true;
573 break;
574 }
575 }
576 break;
577 } /* End of switch(newStream->format) */
578
579 /* We error out even if a single stream has unsupported size set */
580 if (!sizeFound) {
581 ALOGE("%s: Error: Unsupported size of %d x %d requested for stream"
582 "type:%d", __func__, newStream->width, newStream->height,
583 newStream->format);
584 rc = -EINVAL;
585 break;
586 }
587 } /* End of for each stream */
588 return rc;
589 }
590
591 /*===========================================================================
592 * FUNCTION : configureStreams
593 *
594 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
595 * and output streams.
596 *
597 * PARAMETERS :
598 * @stream_list : streams to be configured
599 *
600 * RETURN :
601 *
602 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    int rc = 0;

    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        return BAD_VALUE;
    }
    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        return BAD_VALUE;
    }

    // Reject the whole configuration if any stream has a size we never
    // advertised.
    rc = validateStreamDimensions(streamList);
    if (rc != NO_ERROR) {
        ALOGE("%s: Invalid stream configuration requested!", __func__);
        return rc;
    }

    /* first invalidate all the steams in the mStreamList
     * if they appear again, they will be validated */
    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
        channel->stop();
        (*it)->status = INVALID;
    }
    if (mSupportChannel)
        mSupportChannel->stop();
    if (mMetadataChannel) {
        /* If content of mStreamInfo is not 0, there is metadata stream */
        mMetadataChannel->stop();
    }

#ifdef HAS_MULTIMEDIA_HINTS
    // Withdraw any outstanding HDR power hint before reconfiguring.
    if (mHdrHint == true) {
        if (m_pPowerModule) {
            if (m_pPowerModule->powerHint) {
                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                        (void *)"state=3");
                mHdrHint = false;
            }
        }
    }
#endif

    pthread_mutex_lock(&mMutex);

    bool isZsl = false;
    camera3_stream_t *inputStream = NULL;
    camera3_stream_t *jpegStream = NULL;
    cam_stream_size_info_t stream_config_info;

    // Pass 1: reconcile the requested streams against mStreamInfo.
    // Streams seen again are re-validated (their old channel is deleted and
    // rebuilt below); brand-new streams get a fresh stream_info record.
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGD("%s: newStream type = %d, stream format = %d stream size : %d x %d",
                __func__, newStream->stream_type, newStream->format,
                newStream->width, newStream->height);
        //if the stream is in the mStreamList validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                delete channel;
                (*it)->status = VALID;
                (*it)->stream->priv = NULL;
                (*it)->channel = NULL;
            }
        }
        if (!stream_exists) {
            //new stream
            stream_info_t* stream_info;
            // NOTE(review): malloc result is not checked for NULL before the
            // dereferences below -- worth hardening.
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->channel = NULL;
            mStreamInfo.push_back(stream_info);
        }
        // Remember the (single allowed) input stream and any JPEG stream.
        if (newStream->stream_type == CAMERA3_STREAM_INPUT
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }
    mInputStream = inputStream;

    // Drop stream_info entries still marked INVALID and rebuild the
    // metadata/support channels from scratch.
    cleanAndSortStreamInfo();
    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        delete mMetadataChannel;
        mMetadataChannel = NULL;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* Create dummy stream if there is one single raw stream */
    if (streamList->num_streams == 1 &&
            (streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
            streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW16)) {
        mSupportChannel = new QCamera3SupportChannel(
                mCameraHandle->camera_handle,
                mCameraHandle->ops,
                &gCamCapability[mCameraId]->padding_info,
                this);
        if (!mSupportChannel) {
            ALOGE("%s: dummy channel cannot be created", __func__);
            pthread_mutex_unlock(&mMutex);
            return -ENOMEM;
        }
    }

    /* Allocate channel objects for the requested streams */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        uint32_t stream_usage = newStream->usage;
        stream_config_info.stream_sizes[i].width = newStream->width;
        stream_config_info.stream_sizes[i].height = newStream->height;
        // A bidirectional implementation-defined stream alongside a JPEG
        // stream is treated as ZSL and forced to full active-array size.
        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
            //for zsl stream the size is active array size
            isZsl = true;
            stream_config_info.stream_sizes[i].width =
                    gCamCapability[mCameraId]->active_array_size.width;
            stream_config_info.stream_sizes[i].height =
                    gCamCapability[mCameraId]->active_array_size.height;
            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
        } else {
           //for non zsl streams find out the format
           switch (newStream->format) {
           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
              {
                 // Gralloc usage disambiguates video vs. preview for
                 // implementation-defined buffers.
                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
                 } else {
                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
                 }
              }
              break;
           case HAL_PIXEL_FORMAT_YCbCr_420_888:
              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
#ifdef HAS_MULTIMEDIA_HINTS
              // Callback (YUV) streams raise the HDR power hint.
              if (m_pPowerModule) {
                 if (m_pPowerModule->powerHint) {
                     m_pPowerModule->powerHint(m_pPowerModule,
                         POWER_HINT_VIDEO_ENCODE, (void *)"state=2");
                     mHdrHint = true;
                 }
              }
#endif
              break;
           case HAL_PIXEL_FORMAT_BLOB:
              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
              break;
           case HAL_PIXEL_FORMAT_RAW_OPAQUE:
           case HAL_PIXEL_FORMAT_RAW16:
              stream_config_info.type[i] = CAM_STREAM_TYPE_RAW;
              break;
           default:
              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
              break;
           }
        }
        if (newStream->priv == NULL) {
            //New stream, construct channel
            // Set gralloc usage flags according to the stream direction.
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                /* For video encoding stream, set read/write rarely
                 * flag so that they may be set to un-cached */
                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
                    newStream->usage =
                         (GRALLOC_USAGE_SW_READ_RARELY |
                         GRALLOC_USAGE_SW_WRITE_RARELY |
                         GRALLOC_USAGE_HW_CAMERA_WRITE);
                else
                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3Channel *channel = NULL;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info,
                            this,
                            newStream,
                            (cam_stream_type_t) stream_config_info.type[i]);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
                case HAL_PIXEL_FORMAT_RAW16:
                    newStream->max_buffers = QCamera3RawChannel::kMaxBuffers;
                    mRawChannel = new QCamera3RawChannel(
                            mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info,
                            this, newStream, (newStream->format == HAL_PIXEL_FORMAT_RAW16));
                    if (mRawChannel == NULL) {
                        ALOGE("%s: allocation of raw channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = (QCamera3Channel*)mRawChannel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (mPictureChannel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = (QCamera3Channel*)mPictureChannel;
                    break;

                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }

            // Link the freshly created channel back into mStreamInfo.
            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                    it != mStreamInfo.end(); it++) {
                if ((*it)->stream == newStream) {
                    (*it)->channel = (QCamera3Channel*) newStream->priv;
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }

    // NOTE(review): isZsl implies a BLOB stream existed, so mPictureChannel
    // should be non-NULL here -- presumably; confirm against the switch above.
    if (isZsl)
        mPictureChannel->overrideYuvSize(
                gCamCapability[mCameraId]->active_array_size.width,
                gCamCapability[mCameraId]->active_array_size.height);

    int32_t hal_version = CAM_HAL_V3;
    stream_config_info.num_streams = streamList->num_streams;
    // The dummy support channel counts as an extra backend stream.
    if (mSupportChannel) {
        stream_config_info.stream_sizes[stream_config_info.num_streams] =
                QCamera3SupportChannel::kDim;
        stream_config_info.type[stream_config_info.num_streams] =
                CAM_STREAM_TYPE_CALLBACK;
        stream_config_info.num_streams++;
    }

    // settings/parameters don't carry over for new configureStreams
    memset(mParameters, 0, sizeof(metadata_buffer_t));

    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
    AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
                sizeof(hal_version), &hal_version);

    AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
                sizeof(stream_config_info), &stream_config_info);

    // Push the new stream layout down to the backend.
    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);

    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
    mPendingRequestsList.clear();
    mPendingFrameDropList.clear();
    // Initialize/Reset the pending buffers list
    mPendingBuffersMap.num_buffers = 0;
    mPendingBuffersMap.mPendingBufferList.clear();

    mFirstRequest = true;

    //Get min frame duration for this streams configuration
    deriveMinFrameDuration();

    pthread_mutex_unlock(&mMutex);
    return rc;
}
940
941 /*===========================================================================
942 * FUNCTION : validateCaptureRequest
943 *
944 * DESCRIPTION: validate a capture request from camera service
945 *
946 * PARAMETERS :
947 * @request : request from framework to process
948 *
949 * RETURN :
950 *
951 *==========================================================================*/
validateCaptureRequest(camera3_capture_request_t * request)952 int QCamera3HardwareInterface::validateCaptureRequest(
953 camera3_capture_request_t *request)
954 {
955 ssize_t idx = 0;
956 const camera3_stream_buffer_t *b;
957 CameraMetadata meta;
958
959 /* Sanity check the request */
960 if (request == NULL) {
961 ALOGE("%s: NULL capture request", __func__);
962 return BAD_VALUE;
963 }
964
965 if (request->settings == NULL && mFirstRequest) {
966 /*settings cannot be null for the first request*/
967 return BAD_VALUE;
968 }
969
970 uint32_t frameNumber = request->frame_number;
971 if (request->input_buffer != NULL &&
972 request->input_buffer->stream != mInputStream) {
973 ALOGE("%s: Request %d: Input buffer not from input stream!",
974 __FUNCTION__, frameNumber);
975 return BAD_VALUE;
976 }
977 if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
978 ALOGE("%s: Request %d: No output buffers provided!",
979 __FUNCTION__, frameNumber);
980 return BAD_VALUE;
981 }
982 if (request->input_buffer != NULL) {
983 b = request->input_buffer;
984 QCamera3Channel *channel =
985 static_cast<QCamera3Channel*>(b->stream->priv);
986 if (channel == NULL) {
987 ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
988 __func__, frameNumber, idx);
989 return BAD_VALUE;
990 }
991 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
992 ALOGE("%s: Request %d: Buffer %d: Status not OK!",
993 __func__, frameNumber, idx);
994 return BAD_VALUE;
995 }
996 if (b->release_fence != -1) {
997 ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
998 __func__, frameNumber, idx);
999 return BAD_VALUE;
1000 }
1001 if (b->buffer == NULL) {
1002 ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
1003 __func__, frameNumber, idx);
1004 return BAD_VALUE;
1005 }
1006 }
1007
1008 // Validate all buffers
1009 b = request->output_buffers;
1010 do {
1011 QCamera3Channel *channel =
1012 static_cast<QCamera3Channel*>(b->stream->priv);
1013 if (channel == NULL) {
1014 ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
1015 __func__, frameNumber, idx);
1016 return BAD_VALUE;
1017 }
1018 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1019 ALOGE("%s: Request %d: Buffer %d: Status not OK!",
1020 __func__, frameNumber, idx);
1021 return BAD_VALUE;
1022 }
1023 if (b->release_fence != -1) {
1024 ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
1025 __func__, frameNumber, idx);
1026 return BAD_VALUE;
1027 }
1028 if (b->buffer == NULL) {
1029 ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
1030 __func__, frameNumber, idx);
1031 return BAD_VALUE;
1032 }
1033 idx++;
1034 b = request->output_buffers + idx;
1035 } while (idx < (ssize_t)request->num_output_buffers);
1036
1037 return NO_ERROR;
1038 }
1039
1040 /*===========================================================================
1041 * FUNCTION : deriveMinFrameDuration
1042 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
1044 * on currently configured streams.
1045 *
1046 * PARAMETERS : NONE
1047 *
1048 * RETURN : NONE
1049 *
1050 *==========================================================================*/
deriveMinFrameDuration()1051 void QCamera3HardwareInterface::deriveMinFrameDuration()
1052 {
1053 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
1054
1055 maxJpegDim = 0;
1056 maxProcessedDim = 0;
1057 maxRawDim = 0;
1058
1059 // Figure out maximum jpeg, processed, and raw dimensions
1060 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1061 it != mStreamInfo.end(); it++) {
1062
1063 // Input stream doesn't have valid stream_type
1064 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
1065 continue;
1066
1067 int32_t dimension = (*it)->stream->width * (*it)->stream->height;
1068 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1069 if (dimension > maxJpegDim)
1070 maxJpegDim = dimension;
1071 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
1072 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
1073 if (dimension > maxRawDim)
1074 maxRawDim = dimension;
1075 } else {
1076 if (dimension > maxProcessedDim)
1077 maxProcessedDim = dimension;
1078 }
1079 }
1080
1081 //Assume all jpeg dimensions are in processed dimensions.
1082 if (maxJpegDim > maxProcessedDim)
1083 maxProcessedDim = maxJpegDim;
1084 //Find the smallest raw dimension that is greater or equal to jpeg dimension
1085 if (maxProcessedDim > maxRawDim) {
1086 maxRawDim = INT32_MAX;
1087 for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
1088 i++) {
1089
1090 int32_t dimension =
1091 gCamCapability[mCameraId]->raw_dim[i].width *
1092 gCamCapability[mCameraId]->raw_dim[i].height;
1093
1094 if (dimension >= maxProcessedDim && dimension < maxRawDim)
1095 maxRawDim = dimension;
1096 }
1097 }
1098
1099 //Find minimum durations for processed, jpeg, and raw
1100 for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
1101 i++) {
1102 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
1103 gCamCapability[mCameraId]->raw_dim[i].height) {
1104 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
1105 break;
1106 }
1107 }
1108 for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
1109 if (maxProcessedDim ==
1110 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
1111 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
1112 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
1113 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
1114 break;
1115 }
1116 }
1117 }
1118
/*===========================================================================
 * FUNCTION   : getMinFrameDuration
 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame
 *              durations and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : minimum frame duration for a particular request
 *
 *==========================================================================*/
getMinFrameDuration(const camera3_capture_request_t * request)1130 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
1131 {
1132 bool hasJpegStream = false;
1133 bool hasRawStream = false;
1134 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
1135 const camera3_stream_t *stream = request->output_buffers[i].stream;
1136 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
1137 hasJpegStream = true;
1138 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
1139 stream->format == HAL_PIXEL_FORMAT_RAW16)
1140 hasRawStream = true;
1141 }
1142
1143 if (!hasJpegStream)
1144 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
1145 else
1146 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
1147 }
1148
1149 /*===========================================================================
1150 * FUNCTION : handleMetadataWithLock
1151 *
1152 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
1153 *
1154 * PARAMETERS : @metadata_buf: metadata buffer
1155 *
1156 * RETURN :
1157 *
1158 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf)
{
    // Raw metadata payload from the backend; individual entries are
    // located with POINTER_OF() inside the flat metadata_buffer_t.
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = *(int32_t *)
        POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t pending_requests = *(uint32_t *)POINTER_OF(
        CAM_INTF_META_PENDING_REQUESTS, metadata);
    uint32_t frame_number = *(uint32_t *)
        POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
    const struct timeval *tv = (const struct timeval *)
        POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    // Sensor timestamp converted from timeval to nanoseconds.
    nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
        tv->tv_usec * NSEC_PER_USEC;
    cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
        POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);

    int32_t urgent_frame_number_valid = *(int32_t *)
        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t urgent_frame_number = *(uint32_t *)
        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if (urgent_frame_number_valid) {
        ALOGV("%s: valid urgent frame_number = %d, capture_time = %lld",
          __func__, urgent_frame_number, capture_time);

        //Received an urgent Frame Number, handle it
        //using partial results
        for (List<PendingRequestInfo>::iterator i =
            mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            camera3_notify_msg_t notify_msg;
            ALOGV("%s: Iterator Frame = %d urgent frame = %d",
                __func__, i->frame_number, urgent_frame_number);

            // Earlier requests that were never notified get an estimated
            // shutter timestamp, extrapolated backwards at ~33 ms/frame.
            if (i->frame_number < urgent_frame_number &&
                i->bNotified == 0) {
                notify_msg.type = CAMERA3_MSG_SHUTTER;
                notify_msg.message.shutter.frame_number = i->frame_number;
                notify_msg.message.shutter.timestamp = capture_time -
                    (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                i->timestamp = notify_msg.message.shutter.timestamp;
                i->bNotified = 1;
                ALOGV("%s: Support notification !!!! notify frame_number = %d, capture_time = %lld",
                    __func__, i->frame_number, notify_msg.message.shutter.timestamp);
            }

            // Exact match: send the real shutter plus a partial result
            // carrying the urgent (3A) metadata.
            if (i->frame_number == urgent_frame_number) {

                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));

                // Send shutter notify to frameworks
                notify_msg.type = CAMERA3_MSG_SHUTTER;
                notify_msg.message.shutter.frame_number = i->frame_number;
                notify_msg.message.shutter.timestamp = capture_time;
                mCallbackOps->notify(mCallbackOps, &notify_msg);

                i->timestamp = capture_time;
                i->bNotified = 1;
                i->partial_result_cnt++;
                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: urgent frame_number = %d, capture_time = %lld",
                     __func__, result.frame_number, capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    // A metadata buffer without a valid frame number only marks start of
    // frame; return it to the channel and just bump pipeline depths.
    if (!frame_number_valid) {
        ALOGV("%s: Not a valid normal frame number, used as SOF only", __func__);
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
        goto done_metadata;
    }
    ALOGV("%s: valid normal frame_number = %d, capture_time = %lld", __func__,
            frame_number, capture_time);

    // Go through the pending requests info and send shutter/results to frameworks
    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);

        i->partial_result_cnt++;
        result.partial_result = i->partial_result_cnt;

        // Flush out all entries with less or equal frame numbers.
        mPendingRequest--;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then notify ERROR_BUFFER for the corresponding stream and
        // buffer with CAMERA3_BUFFER_STATUS_ERROR
        if (cam_frame_drop.frame_dropped) {
            camera3_notify_msg_t notify_msg;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                  j != i->buffers.end(); j++) {
               QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
               uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
               for (uint32_t k=0; k<cam_frame_drop.cam_stream_ID.num_streams; k++) {
                 if (streamID == cam_frame_drop.cam_stream_ID.streamID[k]) {
                     // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                     ALOGV("%s: Start of reporting error frame#=%d, streamID=%d",
                            __func__, i->frame_number, streamID);
                     notify_msg.type = CAMERA3_MSG_ERROR;
                     notify_msg.message.error.frame_number = i->frame_number;
                     notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                     notify_msg.message.error.error_stream = j->stream;
                     mCallbackOps->notify(mCallbackOps, &notify_msg);
                     ALOGV("%s: End of reporting error frame#=%d, streamID=%d",
                            __func__, i->frame_number, streamID);
                     PendingFrameDropInfo PendingFrameDrop;
                     PendingFrameDrop.frame_number=i->frame_number;
                     PendingFrameDrop.stream_ID = streamID;
                     // Add the Frame drop info to mPendingFrameDropList
                     mPendingFrameDropList.push_back(PendingFrameDrop);
                 }
               }
            }
        }

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        if (i->frame_number < frame_number) {
            // Older entry: its metadata was dropped; synthesize a minimal
            // result with just timestamp and request id.
            CameraMetadata dummyMetadata;
            dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                    &i->timestamp, 1);
            dummyMetadata.update(ANDROID_REQUEST_ID,
                    &(i->request_id), 1);
            result.result = dummyMetadata.release();
        } else {
            // Exact match: translate the HAL metadata into a framework
            // result for this request.
            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                                           mMetaFrameCount,
                                           enabled,
                                           "Snapshot",
                                           frame_number);
                    }
                }

                //If it is a blob request then send the metadata to the picture channel
                // The copy is owned by the picture channel after queueing.
                metadata_buffer_t *reproc_meta =
                        (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
                if (reproc_meta == NULL) {
                    ALOGE("%s: Failed to allocate memory for reproc data.", __func__);
                    // NOTE(review): on this path metadata_buf is neither
                    // returned via bufDone() nor freed, and the pending
                    // request entry is not erased — confirm this does not
                    // leak the buffer / stall the request.
                    goto done_metadata;
                }
                *reproc_meta = *metadata;
                mPictureChannel->queueReprocMetadata(reproc_meta);
            }
            // Return metadata buffer
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        if (!result.result) {
            ALOGE("%s: metadata is NULL", __func__);
        }
        result.frame_number = i->frame_number;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count the buffers already cached for this request by
        // handleBufferWithLock(); they are delivered with this result.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            if (!result_buffers) {
                // NOTE(review): operator new[] normally throws rather than
                // returning NULL, and execution falls through here anyway —
                // confirm intended error handling.
                ALOGE("%s: Fatal error: out of memory", __func__);
            }
            size_t result_buffers_idx = 0;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    // Mark the buffer with STATUS_ERROR if its frame was
                    // reported dropped for this stream.
                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                            m != mPendingFrameDropList.end(); m++) {
                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                        if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                            ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                                  __func__, frame_number, streamID);
                            m = mPendingFrameDropList.erase(m);
                            break;
                        }
                    }

                    // Remove the buffer from the pending-buffers bookkeeping.
                    for (List<PendingBufferInfo>::iterator k =
                      mPendingBuffersMap.mPendingBufferList.begin();
                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
                      if (k->buffer == j->buffer->buffer) {
                        ALOGV("%s: Found buffer %p in pending buffer List "
                              "for frame %d, Take it out!!", __func__,
                               k->buffer, k->frame_number);
                        mPendingBuffersMap.num_buffers--;
                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
                        break;
                      }
                    }

                    result_buffers[result_buffers_idx++] = *(j->buffer);
                    free(j->buffer);
                    j->buffer = NULL;
                }
            }
            result.output_buffers = result_buffers;

            mCallbackOps->process_capture_result(mCallbackOps, &result);
            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                    __func__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
            delete[] result_buffers;
        } else {
            // No buffers ready yet: deliver the metadata-only result.
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        // erase the element from the list
        i = mPendingRequestsList.erase(i);
    }

done_metadata:
    // Every still-pending request has advanced one stage in the pipeline.
    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    // Wake process_capture_request() once the backend reports no more
    // outstanding requests.
    if (!pending_requests)
        unblockRequestIfNecessary();

}
1413
1414 /*===========================================================================
1415 * FUNCTION : handleBufferWithLock
1416 *
1417 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
1418 *
1419 * PARAMETERS : @buffer: image buffer for the callback
1420 * @frame_number: frame number of the image buffer
1421 *
1422 * RETURN :
1423 *
1424 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Request already completed (metadata sent): return the buffer
        // to the framework immediately as a buffers-only result.
        // Verify all pending requests frame_numbers are greater
        for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if (j->frame_number < frame_number) {
                ALOGE("%s: Error: pending frame number %d is smaller than %d",
                        __func__, j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this frame/stream was reported dropped, flag the buffer with
        // STATUS_ERROR and clear the drop record.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        ALOGV("%s: result frame_number = %d, buffer = %p",
                __func__, frame_number, buffer->buffer);

        // Drop the handle from the pending-buffers bookkeeping.
        for (List<PendingBufferInfo>::iterator k =
                mPendingBuffersMap.mPendingBufferList.begin();
                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer->buffer) {
                ALOGV("%s: Found Frame buffer, take it out from list",
                        __func__);

                mPendingBuffersMap.num_buffers--;
                k = mPendingBuffersMap.mPendingBufferList.erase(k);
                break;
            }
        }
        ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
            __func__, mPendingBuffersMap.num_buffers);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer_present) {
            // Reprocess request: a single output buffer completes it; send
            // the result and retire the pending entry right away.
            camera3_capture_result result;
            memset(&result, 0, sizeof(camera3_capture_result_t));
            result.result = NULL;
            result.frame_number = frame_number;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            result.partial_result = 0;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            i = mPendingRequestsList.erase(i);
            mPendingRequest--;
        } else {
            // Metadata not delivered yet: cache a copy of the buffer on the
            // pending request; handleMetadataWithLock() sends it later.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        // NOTE(review): malloc return is not checked before
                        // the dereference on the next line — confirm
                        // intended behavior on allocation failure.
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        ALOGV("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
}
1512
1513 /*===========================================================================
1514 * FUNCTION : unblockRequestIfNecessary
1515 *
1516 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
1517 * that mMutex is held when this function is called.
1518 *
1519 * PARAMETERS :
1520 *
1521 * RETURN :
1522 *
1523 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
    // Unblock process_capture_request
    // Caller holds mMutex (per the function header), so the waiter
    // re-evaluates its predicate under the lock after waking.
    pthread_cond_signal(&mRequestCond);
}
1529
1530 /*===========================================================================
1531 * FUNCTION : registerStreamBuffers
1532 *
1533 * DESCRIPTION: Register buffers for a given stream with the HAL device.
1534 *
1535 * PARAMETERS :
1536 * @stream_list : streams to be configured
1537 *
1538 * RETURN :
1539 *
1540 *==========================================================================*/
int QCamera3HardwareInterface::registerStreamBuffers(
        const camera3_stream_buffer_set_t * /*buffer_set*/)
{
    //Deprecated
    // Kept as a no-op for camera3 interface compatibility; buffers are
    // instead registered per-request via channel->registerBuffer() in
    // processCaptureRequest().
    return NO_ERROR;
}
1547
1548 /*===========================================================================
1549 * FUNCTION : processCaptureRequest
1550 *
1551 * DESCRIPTION: process a capture request from camera service
1552 *
1553 * PARAMETERS :
1554 * @request : request from framework to process
1555 *
1556 * RETURN :
1557 *
1558 *==========================================================================*/
processCaptureRequest(camera3_capture_request_t * request)1559 int QCamera3HardwareInterface::processCaptureRequest(
1560 camera3_capture_request_t *request)
1561 {
1562 int rc = NO_ERROR;
1563 int32_t request_id;
1564 CameraMetadata meta;
1565
1566 pthread_mutex_lock(&mMutex);
1567
1568 rc = validateCaptureRequest(request);
1569 if (rc != NO_ERROR) {
1570 ALOGE("%s: incoming request is not valid", __func__);
1571 pthread_mutex_unlock(&mMutex);
1572 return rc;
1573 }
1574
1575 meta = request->settings;
1576
1577 // For first capture request, send capture intent, and
1578 // stream on all streams
1579 if (mFirstRequest) {
1580
1581 for (size_t i = 0; i < request->num_output_buffers; i++) {
1582 const camera3_stream_buffer_t& output = request->output_buffers[i];
1583 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1584 rc = channel->registerBuffer(output.buffer);
1585 if (rc < 0) {
1586 ALOGE("%s: registerBuffer failed",
1587 __func__);
1588 pthread_mutex_unlock(&mMutex);
1589 return -ENODEV;
1590 }
1591 }
1592
1593 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
1594 int32_t hal_version = CAM_HAL_V3;
1595 uint8_t captureIntent =
1596 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
1597
1598 memset(mParameters, 0, sizeof(metadata_buffer_t));
1599 mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
1600 AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
1601 sizeof(hal_version), &hal_version);
1602 AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
1603 sizeof(captureIntent), &captureIntent);
1604 mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
1605 mParameters);
1606 }
1607
1608 //First initialize all streams
1609 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1610 it != mStreamInfo.end(); it++) {
1611 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1612 rc = channel->initialize();
1613 if (NO_ERROR != rc) {
1614 ALOGE("%s : Channel initialization failed %d", __func__, rc);
1615 pthread_mutex_unlock(&mMutex);
1616 return rc;
1617 }
1618 }
1619 if (mSupportChannel) {
1620 rc = mSupportChannel->initialize();
1621 if (rc < 0) {
1622 ALOGE("%s: Support channel initialization failed", __func__);
1623 pthread_mutex_unlock(&mMutex);
1624 return rc;
1625 }
1626 }
1627
1628 //Then start them.
1629 ALOGD("%s: Start META Channel", __func__);
1630 mMetadataChannel->start();
1631
1632 if (mSupportChannel) {
1633 rc = mSupportChannel->start();
1634 if (rc < 0) {
1635 ALOGE("%s: Support channel start failed", __func__);
1636 mMetadataChannel->stop();
1637 pthread_mutex_unlock(&mMutex);
1638 return rc;
1639 }
1640 }
1641 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1642 it != mStreamInfo.end(); it++) {
1643 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1644 ALOGD("%s: Start Regular Channel mask=%d", __func__, channel->getStreamTypeMask());
1645 channel->start();
1646 }
1647 }
1648
1649 uint32_t frameNumber = request->frame_number;
1650 cam_stream_ID_t streamID;
1651
1652 if (meta.exists(ANDROID_REQUEST_ID)) {
1653 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
1654 mCurrentRequestId = request_id;
1655 ALOGV("%s: Received request with id: %d",__func__, request_id);
1656 } else if (mFirstRequest || mCurrentRequestId == -1){
1657 ALOGE("%s: Unable to find request id field, \
1658 & no previous id available", __func__);
1659 return NAME_NOT_FOUND;
1660 } else {
1661 ALOGV("%s: Re-using old request id", __func__);
1662 request_id = mCurrentRequestId;
1663 }
1664
1665 ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
1666 __func__, __LINE__,
1667 request->num_output_buffers,
1668 request->input_buffer,
1669 frameNumber);
1670 // Acquire all request buffers first
1671 streamID.num_streams = 0;
1672 int blob_request = 0;
1673 for (size_t i = 0; i < request->num_output_buffers; i++) {
1674 const camera3_stream_buffer_t& output = request->output_buffers[i];
1675 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1676 sp<Fence> acquireFence = new Fence(output.acquire_fence);
1677
1678 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1679 //Call function to store local copy of jpeg data for encode params.
1680 blob_request = 1;
1681 }
1682
1683 rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
1684 if (rc != OK) {
1685 ALOGE("%s: fence wait failed %d", __func__, rc);
1686 pthread_mutex_unlock(&mMutex);
1687 return rc;
1688 }
1689
1690 streamID.streamID[streamID.num_streams] =
1691 channel->getStreamID(channel->getStreamTypeMask());
1692 streamID.num_streams++;
1693 }
1694
1695 if(request->input_buffer == NULL) {
1696 rc = setFrameParameters(request, streamID);
1697 if (rc < 0) {
1698 ALOGE("%s: fail to set frame parameters", __func__);
1699 pthread_mutex_unlock(&mMutex);
1700 return rc;
1701 }
1702 }
1703
1704 /* Update pending request list and pending buffers map */
1705 PendingRequestInfo pendingRequest;
1706 pendingRequest.frame_number = frameNumber;
1707 pendingRequest.num_buffers = request->num_output_buffers;
1708 pendingRequest.request_id = request_id;
1709 pendingRequest.blob_request = blob_request;
1710 pendingRequest.bNotified = 0;
1711 pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
1712 pendingRequest.pipeline_depth = 0;
1713 pendingRequest.partial_result_cnt = 0;
1714 extractJpegMetadata(pendingRequest.jpegMetadata, request);
1715
1716 for (size_t i = 0; i < request->num_output_buffers; i++) {
1717 RequestedBufferInfo requestedBuf;
1718 requestedBuf.stream = request->output_buffers[i].stream;
1719 requestedBuf.buffer = NULL;
1720 pendingRequest.buffers.push_back(requestedBuf);
1721
1722 // Add to buffer handle the pending buffers list
1723 PendingBufferInfo bufferInfo;
1724 bufferInfo.frame_number = frameNumber;
1725 bufferInfo.buffer = request->output_buffers[i].buffer;
1726 bufferInfo.stream = request->output_buffers[i].stream;
1727 mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
1728 mPendingBuffersMap.num_buffers++;
1729 ALOGV("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
1730 __func__, frameNumber, bufferInfo.buffer, bufferInfo.stream,
1731 bufferInfo.stream->format);
1732 }
1733 ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
1734 __func__, mPendingBuffersMap.num_buffers);
1735
1736 mPendingRequestsList.push_back(pendingRequest);
1737
1738 // Notify metadata channel we receive a request
1739 mMetadataChannel->request(NULL, frameNumber);
1740
1741 // Call request on other streams
1742 for (size_t i = 0; i < request->num_output_buffers; i++) {
1743 const camera3_stream_buffer_t& output = request->output_buffers[i];
1744 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
1745 mm_camera_buf_def_t *pInputBuffer = NULL;
1746
1747 if (channel == NULL) {
1748 ALOGE("%s: invalid channel pointer for stream", __func__);
1749 continue;
1750 }
1751
1752 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
1753 QCamera3RegularChannel* inputChannel = NULL;
1754 if(request->input_buffer != NULL){
1755
1756 //Try to get the internal format
1757 inputChannel = (QCamera3RegularChannel*)
1758 request->input_buffer->stream->priv;
1759 if(inputChannel == NULL ){
1760 ALOGE("%s: failed to get input channel handle", __func__);
1761 } else {
1762 pInputBuffer =
1763 inputChannel->getInternalFormatBuffer(
1764 request->input_buffer->buffer);
1765 ALOGD("%s: Input buffer dump",__func__);
1766 ALOGD("Stream id: %d", pInputBuffer->stream_id);
1767 ALOGD("streamtype:%d", pInputBuffer->stream_type);
1768 ALOGD("frame len:%d", pInputBuffer->frame_len);
1769 ALOGD("Handle:%p", request->input_buffer->buffer);
1770 }
1771 rc = channel->request(output.buffer, frameNumber,
1772 pInputBuffer, mParameters);
1773 if (rc < 0) {
1774 ALOGE("%s: Fail to request on picture channel", __func__);
1775 pthread_mutex_unlock(&mMutex);
1776 return rc;
1777 }
1778
1779 rc = setReprocParameters(request);
1780 if (rc < 0) {
1781 ALOGE("%s: fail to set reproc parameters", __func__);
1782 pthread_mutex_unlock(&mMutex);
1783 return rc;
1784 }
1785 } else{
1786 ALOGV("%s: %d, snapshot request with buffer %p, frame_number %d", __func__,
1787 __LINE__, output.buffer, frameNumber);
1788 if (mRepeatingRequest) {
1789 rc = channel->request(output.buffer, frameNumber,
1790 NULL, mPrevParameters);
1791 } else {
1792 rc = channel->request(output.buffer, frameNumber,
1793 NULL, mParameters);
1794 }
1795 }
1796 } else {
1797 ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
1798 __LINE__, output.buffer, frameNumber);
1799 rc = channel->request(output.buffer, frameNumber);
1800 }
1801 if (rc < 0)
1802 ALOGE("%s: request failed", __func__);
1803 }
1804
1805 /*set the parameters to backend*/
1806 mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1807
1808 mFirstRequest = false;
1809 // Added a timed condition wait
1810 struct timespec ts;
1811 uint8_t isValidTimeout = 1;
1812 rc = clock_gettime(CLOCK_REALTIME, &ts);
1813 if (rc < 0) {
1814 isValidTimeout = 0;
1815 ALOGE("%s: Error reading the real time clock!!", __func__);
1816 }
1817 else {
1818 // Make timeout as 5 sec for request to be honored
1819 ts.tv_sec += 5;
1820 }
1821 //Block on conditional variable
1822
1823 mPendingRequest++;
1824 while (mPendingRequest >= kMaxInFlight) {
1825 if (!isValidTimeout) {
1826 ALOGV("%s: Blocking on conditional wait", __func__);
1827 pthread_cond_wait(&mRequestCond, &mMutex);
1828 }
1829 else {
1830 ALOGV("%s: Blocking on timed conditional wait", __func__);
1831 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
1832 if (rc == ETIMEDOUT) {
1833 rc = -ENODEV;
1834 ALOGE("%s: Unblocked on timeout!!!!", __func__);
1835 break;
1836 }
1837 }
1838 ALOGV("%s: Unblocked", __func__);
1839 }
1840 pthread_mutex_unlock(&mMutex);
1841
1842 return rc;
1843 }
1844
1845 /*===========================================================================
1846 * FUNCTION : dump
1847 *
1848 * DESCRIPTION:
1849 *
1850 * PARAMETERS :
1851 *
1852 *
1853 * RETURN :
1854 *==========================================================================*/
dump(int)1855 void QCamera3HardwareInterface::dump(int /*fd*/)
1856 {
1857 /*Enable lock when we implement this function*/
1858 /*
1859 pthread_mutex_lock(&mMutex);
1860
1861 pthread_mutex_unlock(&mMutex);
1862 */
1863 return;
1864 }
1865
1866 /*===========================================================================
1867 * FUNCTION : flush
1868 *
1869 * DESCRIPTION:
1870 *
1871 * PARAMETERS :
1872 *
1873 *
1874 * RETURN :
1875 *==========================================================================*/
flush()1876 int QCamera3HardwareInterface::flush()
1877 {
1878 unsigned int frameNum = 0;
1879 camera3_notify_msg_t notify_msg;
1880 camera3_capture_result_t result;
1881 camera3_stream_buffer_t *pStream_Buf = NULL;
1882 FlushMap flushMap;
1883
1884 ALOGV("%s: Unblocking Process Capture Request", __func__);
1885
1886 memset(&result, 0, sizeof(camera3_capture_result_t));
1887
1888 // Stop the Streams/Channels
1889 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
1890 it != mStreamInfo.end(); it++) {
1891 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
1892 channel->stop();
1893 (*it)->status = INVALID;
1894 }
1895
1896 if (mSupportChannel) {
1897 mSupportChannel->stop();
1898 }
1899 if (mMetadataChannel) {
1900 /* If content of mStreamInfo is not 0, there is metadata stream */
1901 mMetadataChannel->stop();
1902 }
1903
1904 // Mutex Lock
1905 pthread_mutex_lock(&mMutex);
1906
1907 // Unblock process_capture_request
1908 mPendingRequest = 0;
1909 pthread_cond_signal(&mRequestCond);
1910
1911 List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1912 frameNum = i->frame_number;
1913 ALOGV("%s: Oldest frame num on mPendingRequestsList = %d",
1914 __func__, frameNum);
1915
1916 // Go through the pending buffers and group them depending
1917 // on frame number
1918 for (List<PendingBufferInfo>::iterator k =
1919 mPendingBuffersMap.mPendingBufferList.begin();
1920 k != mPendingBuffersMap.mPendingBufferList.end();) {
1921
1922 if (k->frame_number < frameNum) {
1923 ssize_t idx = flushMap.indexOfKey(k->frame_number);
1924 if (idx == NAME_NOT_FOUND) {
1925 Vector<PendingBufferInfo> pending;
1926 pending.add(*k);
1927 flushMap.add(k->frame_number, pending);
1928 } else {
1929 Vector<PendingBufferInfo> &pending =
1930 flushMap.editValueFor(k->frame_number);
1931 pending.add(*k);
1932 }
1933
1934 mPendingBuffersMap.num_buffers--;
1935 k = mPendingBuffersMap.mPendingBufferList.erase(k);
1936 } else {
1937 k++;
1938 }
1939 }
1940
1941 for (size_t i = 0; i < flushMap.size(); i++) {
1942 uint32_t frame_number = flushMap.keyAt(i);
1943 const Vector<PendingBufferInfo> &pending = flushMap.valueAt(i);
1944
1945 // Send Error notify to frameworks for each buffer for which
1946 // metadata buffer is already sent
1947 ALOGV("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
1948 __func__, frame_number, pending.size());
1949
1950 pStream_Buf = new camera3_stream_buffer_t[pending.size()];
1951 if (NULL == pStream_Buf) {
1952 ALOGE("%s: No memory for pending buffers array", __func__);
1953 pthread_mutex_unlock(&mMutex);
1954 return NO_MEMORY;
1955 }
1956
1957 for (size_t j = 0; j < pending.size(); j++) {
1958 const PendingBufferInfo &info = pending.itemAt(j);
1959 notify_msg.type = CAMERA3_MSG_ERROR;
1960 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
1961 notify_msg.message.error.error_stream = info.stream;
1962 notify_msg.message.error.frame_number = frame_number;
1963 pStream_Buf[j].acquire_fence = -1;
1964 pStream_Buf[j].release_fence = -1;
1965 pStream_Buf[j].buffer = info.buffer;
1966 pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
1967 pStream_Buf[j].stream = info.stream;
1968 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
1969 ALOGV("%s: notify frame_number = %d stream %p", __func__,
1970 frame_number, info.stream);
1971 }
1972
1973 result.result = NULL;
1974 result.frame_number = frame_number;
1975 result.num_output_buffers = pending.size();
1976 result.output_buffers = pStream_Buf;
1977 mCallbackOps->process_capture_result(mCallbackOps, &result);
1978
1979 delete [] pStream_Buf;
1980 }
1981
1982 ALOGV("%s:Sending ERROR REQUEST for all pending requests", __func__);
1983
1984 flushMap.clear();
1985 for (List<PendingBufferInfo>::iterator k =
1986 mPendingBuffersMap.mPendingBufferList.begin();
1987 k != mPendingBuffersMap.mPendingBufferList.end();) {
1988 ssize_t idx = flushMap.indexOfKey(k->frame_number);
1989 if (idx == NAME_NOT_FOUND) {
1990 Vector<PendingBufferInfo> pending;
1991 pending.add(*k);
1992 flushMap.add(k->frame_number, pending);
1993 } else {
1994 Vector<PendingBufferInfo> &pending =
1995 flushMap.editValueFor(k->frame_number);
1996 pending.add(*k);
1997 }
1998
1999 mPendingBuffersMap.num_buffers--;
2000 k = mPendingBuffersMap.mPendingBufferList.erase(k);
2001 }
2002
2003 // Go through the pending requests info and send error request to framework
2004 for (size_t i = 0; i < flushMap.size(); i++) {
2005 uint32_t frame_number = flushMap.keyAt(i);
2006 const Vector<PendingBufferInfo> &pending = flushMap.valueAt(i);
2007 ALOGV("%s:Sending ERROR REQUEST for frame %d",
2008 __func__, frame_number);
2009
2010 // Send shutter notify to frameworks
2011 notify_msg.type = CAMERA3_MSG_ERROR;
2012 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
2013 notify_msg.message.error.error_stream = NULL;
2014 notify_msg.message.error.frame_number = frame_number;
2015 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
2016
2017 pStream_Buf = new camera3_stream_buffer_t[pending.size()];
2018 if (NULL == pStream_Buf) {
2019 ALOGE("%s: No memory for pending buffers array", __func__);
2020 pthread_mutex_unlock(&mMutex);
2021 return NO_MEMORY;
2022 }
2023
2024 for (size_t j = 0; j < pending.size(); j++) {
2025 const PendingBufferInfo &info = pending.itemAt(j);
2026 pStream_Buf[j].acquire_fence = -1;
2027 pStream_Buf[j].release_fence = -1;
2028 pStream_Buf[j].buffer = info.buffer;
2029 pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
2030 pStream_Buf[j].stream = info.stream;
2031 }
2032
2033 result.num_output_buffers = pending.size();
2034 result.output_buffers = pStream_Buf;
2035 result.result = NULL;
2036 result.frame_number = frame_number;
2037 mCallbackOps->process_capture_result(mCallbackOps, &result);
2038 delete [] pStream_Buf;
2039 }
2040
2041 /* Reset pending buffer list and requests list */
2042 mPendingRequestsList.clear();
2043 /* Reset pending frame Drop list and requests list */
2044 mPendingFrameDropList.clear();
2045
2046 flushMap.clear();
2047 mPendingBuffersMap.num_buffers = 0;
2048 mPendingBuffersMap.mPendingBufferList.clear();
2049 ALOGV("%s: Cleared all the pending buffers ", __func__);
2050
2051 mFirstRequest = true;
2052 pthread_mutex_unlock(&mMutex);
2053 return 0;
2054 }
2055
2056 /*===========================================================================
2057 * FUNCTION : captureResultCb
2058 *
2059 * DESCRIPTION: Callback handler for all capture result
2060 * (streams, as well as metadata)
2061 *
2062 * PARAMETERS :
2063 * @metadata : metadata information
2064 * @buffer : actual gralloc buffer to be returned to frameworks.
2065 * NULL if metadata.
2066 *
2067 * RETURN : NONE
2068 *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata_buf,camera3_stream_buffer_t * buffer,uint32_t frame_number)2069 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
2070 camera3_stream_buffer_t *buffer, uint32_t frame_number)
2071 {
2072 pthread_mutex_lock(&mMutex);
2073
2074 /* Assume flush() is called before any reprocessing. Send
2075 * notify and result immediately upon receipt of any callback*/
2076 if (mLoopBackResult) {
2077 /* Send notify */
2078 camera3_notify_msg_t notify_msg;
2079 notify_msg.type = CAMERA3_MSG_SHUTTER;
2080 notify_msg.message.shutter.frame_number = mLoopBackResult->frame_number;
2081 notify_msg.message.shutter.timestamp = mLoopBackTimestamp;
2082 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
2083
2084 /* Send capture result */
2085 mCallbackOps->process_capture_result(mCallbackOps, mLoopBackResult);
2086 free_camera_metadata((camera_metadata_t *)mLoopBackResult->result);
2087 free(mLoopBackResult);
2088 mLoopBackResult = NULL;
2089 }
2090
2091 if (metadata_buf)
2092 handleMetadataWithLock(metadata_buf);
2093 else
2094 handleBufferWithLock(buffer, frame_number);
2095
2096 pthread_mutex_unlock(&mMutex);
2097 return;
2098 }
2099
2100 /*===========================================================================
2101 * FUNCTION : translateFromHalMetadata
2102 *
2103 * DESCRIPTION:
2104 *
2105 * PARAMETERS :
2106 * @metadata : metadata information from callback
2107 *
2108 * RETURN : camera_metadata_t*
2109 * metadata in a format specified by fwk
2110 *==========================================================================*/
2111 camera_metadata_t*
translateFromHalMetadata(metadata_buffer_t * metadata,nsecs_t timestamp,int32_t request_id,const CameraMetadata & jpegMetadata,uint8_t pipeline_depth)2112 QCamera3HardwareInterface::translateFromHalMetadata(
2113 metadata_buffer_t *metadata,
2114 nsecs_t timestamp,
2115 int32_t request_id,
2116 const CameraMetadata& jpegMetadata,
2117 uint8_t pipeline_depth)
2118 {
2119 CameraMetadata camMetadata;
2120 camera_metadata_t* resultMetadata;
2121
2122 if (jpegMetadata.entryCount())
2123 camMetadata.append(jpegMetadata);
2124
2125 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, ×tamp, 1);
2126 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
2127 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
2128
2129 uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
2130 uint8_t next_entry;
2131 while (curr_entry != CAM_INTF_PARM_MAX) {
2132 switch (curr_entry) {
2133 case CAM_INTF_META_FRAME_NUMBER:{
2134 int64_t frame_number = *(uint32_t *) POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
2135 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &frame_number, 1);
2136 break;
2137 }
2138 case CAM_INTF_META_FACE_DETECTION:{
2139 cam_face_detection_data_t *faceDetectionInfo =
2140 (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
2141 uint8_t numFaces = faceDetectionInfo->num_faces_detected;
2142 int32_t faceIds[MAX_ROI];
2143 uint8_t faceScores[MAX_ROI];
2144 int32_t faceRectangles[MAX_ROI * 4];
2145 int32_t faceLandmarks[MAX_ROI * 6];
2146 int j = 0, k = 0;
2147 for (int i = 0; i < numFaces; i++) {
2148 faceIds[i] = faceDetectionInfo->faces[i].face_id;
2149 faceScores[i] = faceDetectionInfo->faces[i].score;
2150 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
2151 faceRectangles+j, -1);
2152 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
2153 j+= 4;
2154 k+= 6;
2155 }
2156
2157 if (numFaces <= 0) {
2158 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
2159 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
2160 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
2161 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
2162 }
2163
2164 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
2165 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
2166 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
2167 faceRectangles, numFaces*4);
2168 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
2169 faceLandmarks, numFaces*6);
2170 break;
2171 }
2172 case CAM_INTF_META_COLOR_CORRECT_MODE:{
2173 uint8_t *color_correct_mode =
2174 (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
2175 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
2176 break;
2177 }
2178
2179 // 3A state is sent in urgent partial result (uses quirk)
2180 case CAM_INTF_META_AEC_STATE:
2181 case CAM_INTF_PARM_AEC_LOCK:
2182 case CAM_INTF_PARM_EV:
2183 case CAM_INTF_PARM_FOCUS_MODE:
2184 case CAM_INTF_META_AF_STATE:
2185 case CAM_INTF_PARM_WHITE_BALANCE:
2186 case CAM_INTF_META_AWB_REGIONS:
2187 case CAM_INTF_META_AWB_STATE:
2188 case CAM_INTF_PARM_AWB_LOCK:
2189 case CAM_INTF_META_PRECAPTURE_TRIGGER:
2190 case CAM_INTF_META_AEC_MODE:
2191 case CAM_INTF_PARM_LED_MODE:
2192 case CAM_INTF_PARM_REDEYE_REDUCTION:
2193 case CAM_INTF_META_AF_TRIGGER_NOTICE: {
2194 ALOGV("%s: 3A metadata: %d, do not process", __func__, curr_entry);
2195 break;
2196 }
2197
2198 case CAM_INTF_META_MODE: {
2199 uint8_t *mode =(uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
2200 camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
2201 break;
2202 }
2203
2204 case CAM_INTF_META_EDGE_MODE: {
2205 cam_edge_application_t *edgeApplication =
2206 (cam_edge_application_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
2207 uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
2208 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
2209 camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
2210 break;
2211 }
2212 case CAM_INTF_META_FLASH_POWER: {
2213 uint8_t *flashPower =
2214 (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
2215 camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
2216 break;
2217 }
2218 case CAM_INTF_META_FLASH_FIRING_TIME: {
2219 int64_t *flashFiringTime =
2220 (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
2221 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
2222 break;
2223 }
2224 case CAM_INTF_META_FLASH_STATE: {
2225 uint8_t flashState =
2226 *((uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata));
2227 if (!gCamCapability[mCameraId]->flash_available) {
2228 flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
2229 }
2230 camMetadata.update(ANDROID_FLASH_STATE, &flashState, 1);
2231 break;
2232 }
2233 case CAM_INTF_META_FLASH_MODE:{
2234 uint8_t flashMode = *((uint8_t*)
2235 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata));
2236 uint8_t fwk_flashMode = lookupFwkName(FLASH_MODES_MAP,
2237 sizeof(FLASH_MODES_MAP),
2238 flashMode);
2239 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
2240 break;
2241 }
2242 case CAM_INTF_META_HOTPIXEL_MODE: {
2243 uint8_t *hotPixelMode =
2244 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
2245 camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
2246 break;
2247 }
2248 case CAM_INTF_META_LENS_APERTURE:{
2249 float *lensAperture =
2250 (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
2251 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
2252 break;
2253 }
2254 case CAM_INTF_META_LENS_FILTERDENSITY: {
2255 float *filterDensity =
2256 (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
2257 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
2258 break;
2259 }
2260 case CAM_INTF_META_LENS_FOCAL_LENGTH:{
2261 float *focalLength =
2262 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
2263 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
2264 break;
2265 }
2266 case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
2267 float *focusDistance =
2268 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
2269 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
2270 break;
2271 }
2272 case CAM_INTF_META_LENS_FOCUS_RANGE: {
2273 float *focusRange =
2274 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
2275 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
2276 break;
2277 }
2278 case CAM_INTF_META_LENS_STATE: {
2279 uint8_t *lensState = (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_STATE, metadata);
2280 camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
2281 break;
2282 }
2283 case CAM_INTF_META_LENS_OPT_STAB_MODE: {
2284 uint8_t *opticalStab =
2285 (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
2286 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
2287 break;
2288 }
2289 case CAM_INTF_META_NOISE_REDUCTION_MODE: {
2290 uint8_t *noiseRedMode =
2291 (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
2292 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
2293 break;
2294 }
2295 case CAM_INTF_META_NOISE_REDUCTION_STRENGTH: {
2296 uint8_t *noiseRedStrength =
2297 (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
2298 camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
2299 break;
2300 }
2301 case CAM_INTF_META_SCALER_CROP_REGION: {
2302 cam_crop_region_t *hScalerCropRegion =(cam_crop_region_t *)
2303 POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
2304 int32_t scalerCropRegion[4];
2305 scalerCropRegion[0] = hScalerCropRegion->left;
2306 scalerCropRegion[1] = hScalerCropRegion->top;
2307 scalerCropRegion[2] = hScalerCropRegion->width;
2308 scalerCropRegion[3] = hScalerCropRegion->height;
2309 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
2310 break;
2311 }
2312 case CAM_INTF_META_AEC_ROI: {
2313 cam_area_t *hAeRegions =
2314 (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
2315 int32_t aeRegions[5];
2316 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
2317 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
2318 ALOGV("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d, %d, %d, %d] HAL: [%d, %d, %d, %d]",
2319 __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
2320 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width, hAeRegions->rect.height);
2321 break;
2322 }
2323 case CAM_INTF_META_AF_ROI:{
2324 /*af regions*/
2325 cam_area_t *hAfRegions =
2326 (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
2327 int32_t afRegions[5];
2328 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
2329 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
2330 ALOGV("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d, %d, %d, %d] HAL: [%d, %d, %d, %d]",
2331 __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
2332 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width, hAfRegions->rect.height);
2333 break;
2334 }
2335 case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
2336 int64_t *sensorExpTime =
2337 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
2338 ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
2339 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
2340 break;
2341 }
2342 case CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW:{
2343 int64_t *sensorRollingShutterSkew =
2344 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW,
2345 metadata);
2346 ALOGV("%s: sensorRollingShutterSkew = %lld", __func__,
2347 *sensorRollingShutterSkew);
2348 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW ,
2349 sensorRollingShutterSkew, 1);
2350 break;
2351 }
2352 case CAM_INTF_META_SENSOR_FRAME_DURATION:{
2353 int64_t *sensorFameDuration =
2354 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
2355 ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
2356 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
2357 break;
2358 }
2359 case CAM_INTF_META_SENSOR_SENSITIVITY:{
2360 int32_t sensorSensitivity =
2361 *((int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata));
2362 ALOGV("%s: sensorSensitivity = %d", __func__, sensorSensitivity);
2363 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, &sensorSensitivity, 1);
2364
2365 double noise_profile_S = computeNoiseModelEntryS(sensorSensitivity);
2366 double noise_profile_O = computeNoiseModelEntryO(sensorSensitivity);
2367 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
2368 for(int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i+=2){
2369 noise_profile[i] = noise_profile_S;
2370 noise_profile[i+1] = noise_profile_O;
2371 }
2372 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
2373 2 * gCamCapability[mCameraId]->num_color_channels);
2374 break;
2375 }
2376 case CAM_INTF_PARM_BESTSHOT_MODE: {
2377 uint8_t *sceneMode =
2378 (uint8_t *)POINTER_OF(CAM_INTF_PARM_BESTSHOT_MODE, metadata);
2379 uint8_t fwkSceneMode =
2380 (uint8_t)lookupFwkName(SCENE_MODES_MAP,
2381 sizeof(SCENE_MODES_MAP)/
2382 sizeof(SCENE_MODES_MAP[0]), *sceneMode);
2383 camMetadata.update(ANDROID_CONTROL_SCENE_MODE,
2384 &fwkSceneMode, 1);
2385 ALOGV("%s: Metadata : ANDROID_CONTROL_SCENE_MODE: %d", __func__, fwkSceneMode);
2386 break;
2387 }
2388
2389 case CAM_INTF_META_SHADING_MODE: {
2390 uint8_t *shadingMode =
2391 (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
2392 camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
2393 break;
2394 }
2395
2396 case CAM_INTF_META_LENS_SHADING_MAP_MODE: {
2397 uint8_t *shadingMapMode =
2398 (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata);
2399 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, shadingMapMode, 1);
2400 break;
2401 }
2402
2403 case CAM_INTF_META_STATS_FACEDETECT_MODE: {
2404 uint8_t *faceDetectMode =
2405 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
2406 uint8_t fwk_faceDetectMode = (uint8_t)lookupFwkName(FACEDETECT_MODES_MAP,
2407 sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
2408 *faceDetectMode);
2409 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
2410 break;
2411 }
2412 case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
2413 uint8_t *histogramMode =
2414 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
2415 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
2416 break;
2417 }
2418 case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
2419 uint8_t *sharpnessMapMode =
2420 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
2421 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
2422 sharpnessMapMode, 1);
2423 break;
2424 }
2425 case CAM_INTF_META_STATS_SHARPNESS_MAP:{
2426 cam_sharpness_map_t *sharpnessMap = (cam_sharpness_map_t *)
2427 POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
2428 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
2429 (int32_t*)sharpnessMap->sharpness,
2430 CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
2431 break;
2432 }
2433 case CAM_INTF_META_LENS_SHADING_MAP: {
2434 cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
2435 POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
2436 int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
2437 int map_width = gCamCapability[mCameraId]->lens_shading_map_size.width;
2438 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
2439 (float*)lensShadingMap->lens_shading,
2440 4*map_width*map_height);
2441 break;
2442 }
2443
2444 case CAM_INTF_META_TONEMAP_MODE: {
2445 uint8_t *toneMapMode =
2446 (uint8_t *)POINTER_OF(CAM_INTF_META_TONEMAP_MODE, metadata);
2447 camMetadata.update(ANDROID_TONEMAP_MODE, toneMapMode, 1);
2448 break;
2449 }
2450
2451 case CAM_INTF_META_TONEMAP_CURVES:{
2452 //Populate CAM_INTF_META_TONEMAP_CURVES
2453 /* ch0 = G, ch 1 = B, ch 2 = R*/
2454 cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
2455 POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
2456 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
2457 (float*)tonemap->curves[0].tonemap_points,
2458 tonemap->tonemap_points_cnt * 2);
2459
2460 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
2461 (float*)tonemap->curves[1].tonemap_points,
2462 tonemap->tonemap_points_cnt * 2);
2463
2464 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
2465 (float*)tonemap->curves[2].tonemap_points,
2466 tonemap->tonemap_points_cnt * 2);
2467 break;
2468 }
2469
2470 case CAM_INTF_META_COLOR_CORRECT_GAINS:{
2471 cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
2472 POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
2473 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
2474 break;
2475 }
2476 case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
2477 cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
2478 POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
2479 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
2480 (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
2481 break;
2482 }
2483
2484 /* DNG file realted metadata */
2485 case CAM_INTF_META_PROFILE_TONE_CURVE: {
2486 cam_profile_tone_curve *toneCurve = (cam_profile_tone_curve *)
2487 POINTER_OF(CAM_INTF_META_PROFILE_TONE_CURVE, metadata);
2488 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
2489 (float*)toneCurve->curve.tonemap_points,
2490 toneCurve->tonemap_points_cnt * 2);
2491 break;
2492 }
2493
2494 case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
2495 cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
2496 POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
2497 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
2498 predColorCorrectionGains->gains, 4);
2499 break;
2500 }
2501 case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
2502 cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
2503 POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
2504 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
2505 (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
2506 break;
2507
2508 }
2509
2510 case CAM_INTF_META_OTP_WB_GRGB:{
2511 float *otpWbGrGb = (float*) POINTER_OF(CAM_INTF_META_OTP_WB_GRGB, metadata);
2512 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
2513 break;
2514 }
2515
2516 case CAM_INTF_META_BLACK_LEVEL_LOCK:{
2517 uint8_t *blackLevelLock = (uint8_t*)
2518 POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
2519 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
2520 break;
2521 }
2522 case CAM_INTF_PARM_ANTIBANDING: {
2523 uint8_t *hal_ab_mode =
2524 (uint8_t *)POINTER_OF(CAM_INTF_PARM_ANTIBANDING, metadata);
2525 uint8_t fwk_ab_mode = (uint8_t)lookupFwkName(ANTIBANDING_MODES_MAP,
2526 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
2527 *hal_ab_mode);
2528 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
2529 &fwk_ab_mode, 1);
2530 break;
2531 }
2532
2533 case CAM_INTF_META_CAPTURE_INTENT:{
2534 uint8_t *captureIntent = (uint8_t*)
2535 POINTER_OF(CAM_INTF_META_CAPTURE_INTENT, metadata);
2536 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, captureIntent, 1);
2537 break;
2538 }
2539
2540 case CAM_INTF_META_SCENE_FLICKER:{
2541 uint8_t *sceneFlicker = (uint8_t*)
2542 POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
2543 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
2544 break;
2545 }
2546 case CAM_INTF_PARM_EFFECT: {
2547 uint8_t *effectMode = (uint8_t*)
2548 POINTER_OF(CAM_INTF_PARM_EFFECT, metadata);
2549 uint8_t fwk_effectMode = (uint8_t)lookupFwkName(EFFECT_MODES_MAP,
2550 sizeof(EFFECT_MODES_MAP),
2551 *effectMode);
2552 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
2553 break;
2554 }
2555 case CAM_INTF_META_TEST_PATTERN_DATA: {
2556 cam_test_pattern_data_t *testPatternData = (cam_test_pattern_data_t *)
2557 POINTER_OF(CAM_INTF_META_TEST_PATTERN_DATA, metadata);
2558 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
2559 sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
2560 testPatternData->mode);
2561 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE,
2562 &fwk_testPatternMode, 1);
2563 int32_t fwk_testPatternData[4];
2564 fwk_testPatternData[0] = testPatternData->r;
2565 fwk_testPatternData[3] = testPatternData->b;
2566 switch (gCamCapability[mCameraId]->color_arrangement) {
2567 case CAM_FILTER_ARRANGEMENT_RGGB:
2568 case CAM_FILTER_ARRANGEMENT_GRBG:
2569 fwk_testPatternData[1] = testPatternData->gr;
2570 fwk_testPatternData[2] = testPatternData->gb;
2571 break;
2572 case CAM_FILTER_ARRANGEMENT_GBRG:
2573 case CAM_FILTER_ARRANGEMENT_BGGR:
2574 fwk_testPatternData[2] = testPatternData->gr;
2575 fwk_testPatternData[1] = testPatternData->gb;
2576 break;
2577 default:
2578 ALOGE("%s: color arrangement %d is not supported", __func__,
2579 gCamCapability[mCameraId]->color_arrangement);
2580 break;
2581 }
2582 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
2583 break;
2584
2585 }
2586 case CAM_INTF_META_JPEG_GPS_COORDINATES: {
2587 double *gps_coords = (double *)POINTER_OF(
2588 CAM_INTF_META_JPEG_GPS_COORDINATES, metadata);
2589 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
2590 break;
2591 }
2592 case CAM_INTF_META_JPEG_GPS_PROC_METHODS: {
2593 char *gps_methods = (char *)POINTER_OF(
2594 CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata);
2595 String8 str(gps_methods);
2596 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
2597 break;
2598 }
2599 case CAM_INTF_META_JPEG_GPS_TIMESTAMP: {
2600 int64_t *gps_timestamp = (int64_t *)POINTER_OF(
2601 CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata);
2602 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
2603 break;
2604 }
2605 case CAM_INTF_META_JPEG_ORIENTATION: {
2606 int32_t *jpeg_orientation = (int32_t *)POINTER_OF(
2607 CAM_INTF_META_JPEG_ORIENTATION, metadata);
2608 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
2609 break;
2610 }
2611 case CAM_INTF_META_JPEG_QUALITY: {
2612 uint8_t *jpeg_quality = (uint8_t *)POINTER_OF(
2613 CAM_INTF_META_JPEG_QUALITY, metadata);
2614 camMetadata.update(ANDROID_JPEG_QUALITY, jpeg_quality, 1);
2615 break;
2616 }
2617 case CAM_INTF_META_JPEG_THUMB_QUALITY: {
2618 uint8_t *thumb_quality = (uint8_t *)POINTER_OF(
2619 CAM_INTF_META_JPEG_THUMB_QUALITY, metadata);
2620 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, thumb_quality, 1);
2621 break;
2622 }
2623
2624 case CAM_INTF_META_JPEG_THUMB_SIZE: {
2625 cam_dimension_t *thumb_size = (cam_dimension_t *)POINTER_OF(
2626 CAM_INTF_META_JPEG_THUMB_SIZE, metadata);
2627 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, (int32_t *)thumb_size, 2);
2628 break;
2629 }
2630
2631 break;
2632 case CAM_INTF_META_PRIVATE_DATA: {
2633 uint8_t *privateData = (uint8_t *)
2634 POINTER_OF(CAM_INTF_META_PRIVATE_DATA, metadata);
2635 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
2636 privateData, MAX_METADATA_PAYLOAD_SIZE);
2637 break;
2638 }
2639
2640 case CAM_INTF_META_NEUTRAL_COL_POINT:{
2641 cam_neutral_col_point_t *neuColPoint = (cam_neutral_col_point_t*)
2642 POINTER_OF(CAM_INTF_META_NEUTRAL_COL_POINT, metadata);
2643 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
2644 (camera_metadata_rational_t*)neuColPoint->neutral_col_point, 3);
2645 break;
2646 }
2647
2648 default:
2649 ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
2650 __func__, curr_entry);
2651 break;
2652 }
2653 next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
2654 curr_entry = next_entry;
2655 }
2656
2657 /* Constant metadata values to be update*/
2658 uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
2659 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
2660
2661 uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
2662 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
2663
2664 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
2665 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
2666
2667 int32_t hotPixelMap[2];
2668 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
2669
2670 uint8_t cac = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
2671 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
2672 &cac,
2673 1);
2674
2675 resultMetadata = camMetadata.release();
2676 return resultMetadata;
2677 }
2678
/*===========================================================================
 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
 *
 * DESCRIPTION: Translates the "urgent" (partial result) subset of the
 *              backend metadata -- 3A states, locks, triggers and related
 *              controls -- into framework-defined result metadata tags.
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : camera_metadata_t*
 *                  metadata in a format specified by fwk
 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;
    // AE-related raw values are only collected inside the loop below; they
    // are combined into a single ANDROID_CONTROL_AE_MODE after the loop.
    uint8_t *aeMode = NULL;
    int32_t *flashMode = NULL;
    int32_t *redeye = NULL;

    // Walk every entry present in the backend metadata buffer and translate
    // the ones that belong in the urgent (partial) result.
    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
    uint8_t next_entry;
    while (curr_entry != CAM_INTF_PARM_MAX) {
      switch (curr_entry) {
        case CAM_INTF_META_AEC_STATE:{
            uint8_t *ae_state =
                (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE", __func__);
            break;
        }
        case CAM_INTF_PARM_AEC_LOCK: {
            uint8_t *ae_lock =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_AEC_LOCK, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_LOCK,
                    ae_lock, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_LOCK", __func__);
            break;
        }
        case CAM_INTF_PARM_FPS_RANGE: {
            // Backend reports float fps; the framework tag is int32, so the
            // fractional part is truncated here.
            int32_t fps_range[2];
            cam_fps_range_t * float_range =
                (cam_fps_range_t *)POINTER_OF(CAM_INTF_PARM_FPS_RANGE, metadata);
            fps_range[0] = (int32_t)float_range->min_fps;
            fps_range[1] = (int32_t)float_range->max_fps;
            camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
                    fps_range, 2);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
                    __func__, fps_range[0], fps_range[1]);
            break;
        }
        case CAM_INTF_PARM_EV: {
            int32_t *expCompensation =
                (int32_t *)POINTER_OF(CAM_INTF_PARM_EV, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
                    expCompensation, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION",
                    __func__);
            break;
        }
        case CAM_INTF_PARM_FOCUS_MODE:{
            // HAL enum -> framework enum translation via lookup table.
            uint8_t *focusMode =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
            uint8_t fwkAfMode = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
            break;
        }
        case CAM_INTF_META_AF_STATE: {
            uint8_t *afState =
               (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
            camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE", __func__);
            break;
        }
        case CAM_INTF_PARM_WHITE_BALANCE: {
           uint8_t *whiteBalance =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
             uint8_t fwkWhiteBalanceMode =
                    (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
                    sizeof(WHITE_BALANCE_MODES_MAP)/
                    sizeof(WHITE_BALANCE_MODES_MAP[0]), *whiteBalance);
             camMetadata.update(ANDROID_CONTROL_AWB_MODE,
                 &fwkWhiteBalanceMode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE", __func__);
             break;
        }

        case CAM_INTF_META_AWB_STATE: {
           uint8_t *whiteBalanceState =
              (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
           camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE", __func__);
           break;
        }


        case CAM_INTF_PARM_AWB_LOCK: {
            uint8_t *awb_lock =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_AWB_LOCK, metadata);
            camMetadata.update(ANDROID_CONTROL_AWB_LOCK, awb_lock, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_LOCK", __func__);
            break;
        }
        case CAM_INTF_META_PRECAPTURE_TRIGGER: {
            uint8_t *precaptureTrigger =
                (uint8_t *)POINTER_OF(CAM_INTF_META_PRECAPTURE_TRIGGER, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                 precaptureTrigger, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER",
                 __func__);
            break;
        }
        case CAM_INTF_META_AF_TRIGGER_NOTICE: {
            uint8_t *af_trigger =
                (uint8_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_NOTICE, metadata);
            camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                 af_trigger, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER = %d",
                 __func__, *af_trigger);
            break;
        }
        // The next three cases only stash pointers; the values are merged
        // into ANDROID_CONTROL_AE_MODE after the loop.
        case CAM_INTF_META_AEC_MODE:{
            aeMode = (uint8_t*)
                POINTER_OF(CAM_INTF_META_AEC_MODE, metadata);
            break;
        }
        case CAM_INTF_PARM_LED_MODE:{
            flashMode = (int32_t*)
                POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
            break;
        }
        case CAM_INTF_PARM_REDEYE_REDUCTION:{
            redeye = (int32_t*)
                POINTER_OF(CAM_INTF_PARM_REDEYE_REDUCTION, metadata);
            break;
        }
        default:
            // Entry belongs to the normal (non-urgent) result path.
            ALOGV("%s: Normal Metadata %d, do not process",
              __func__, curr_entry);
            break;
      }
      next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
      curr_entry = next_entry;
    }

    // Derive ANDROID_CONTROL_AE_MODE from the collected values, in priority
    // order: red-eye reduction > flash auto/on > plain AE on/off.
    uint8_t fwk_aeMode;
    if (redeye != NULL && *redeye == 1) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (flashMode != NULL &&
            ((*flashMode == CAM_FLASH_MODE_AUTO)||
             (*flashMode == CAM_FLASH_MODE_ON))) {
        fwk_aeMode = (uint8_t)lookupFwkName(AE_FLASH_MODE_MAP,
                sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),*flashMode);
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode != NULL && *aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode != NULL && *aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the three sources was present in this buffer.
        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%p, flashMode:%p, aeMode:%p!!!",__func__, redeye, flashMode, aeMode);
    }

    // release() transfers ownership of the packed buffer to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
2850
2851 /*===========================================================================
2852 * FUNCTION : dumpMetadataToFile
2853 *
2854 * DESCRIPTION: Dumps tuning metadata to file system
2855 *
2856 * PARAMETERS :
2857 * @meta : tuning metadata
2858 * @dumpFrameCount : current dump frame count
2859 * @enabled : Enable mask
2860 *
2861 *==========================================================================*/
dumpMetadataToFile(tuning_params_t & meta,uint32_t & dumpFrameCount,int32_t enabled,const char * type,uint32_t frameNumber)2862 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
2863 uint32_t &dumpFrameCount,
2864 int32_t enabled,
2865 const char *type,
2866 uint32_t frameNumber)
2867 {
2868 uint32_t frm_num = 0;
2869
2870 //Some sanity checks
2871 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
2872 ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
2873 __func__,
2874 meta.tuning_sensor_data_size,
2875 TUNING_SENSOR_DATA_MAX);
2876 return;
2877 }
2878
2879 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
2880 ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
2881 __func__,
2882 meta.tuning_vfe_data_size,
2883 TUNING_VFE_DATA_MAX);
2884 return;
2885 }
2886
2887 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
2888 ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
2889 __func__,
2890 meta.tuning_cpp_data_size,
2891 TUNING_CPP_DATA_MAX);
2892 return;
2893 }
2894
2895 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
2896 ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
2897 __func__,
2898 meta.tuning_cac_data_size,
2899 TUNING_CAC_DATA_MAX);
2900 return;
2901 }
2902 //
2903
2904 if(enabled){
2905 frm_num = ((enabled & 0xffff0000) >> 16);
2906 if(frm_num == 0) {
2907 frm_num = 10; //default 10 frames
2908 }
2909 if(frm_num > 256) {
2910 frm_num = 256; //256 buffers cycle around
2911 }
2912 if((frm_num == 256) && (dumpFrameCount >= frm_num)) {
2913 // reset frame count if cycling
2914 dumpFrameCount = 0;
2915 }
2916 ALOGV("DumpFrmCnt = %d, frm_num = %d",dumpFrameCount, frm_num);
2917 if (dumpFrameCount < frm_num) {
2918 char timeBuf[FILENAME_MAX];
2919 char buf[FILENAME_MAX];
2920 memset(buf, 0, sizeof(buf));
2921 memset(timeBuf, 0, sizeof(timeBuf));
2922 time_t current_time;
2923 struct tm * timeinfo;
2924 time (¤t_time);
2925 timeinfo = localtime (¤t_time);
2926 strftime (timeBuf, sizeof(timeBuf),"/data/%Y%m%d%H%M%S", timeinfo);
2927 String8 filePath(timeBuf);
2928 snprintf(buf,
2929 sizeof(buf),
2930 "%d_HAL_META_%s_%d.bin",
2931 dumpFrameCount,
2932 type,
2933 frameNumber);
2934 filePath.append(buf);
2935 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
2936 if (file_fd >= 0) {
2937 int written_len = 0;
2938 meta.tuning_data_version = TUNING_DATA_VERSION;
2939 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
2940 written_len += write(file_fd, data, sizeof(uint32_t));
2941 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
2942 ALOGV("tuning_sensor_data_size %d",(int)(*(int *)data));
2943 written_len += write(file_fd, data, sizeof(uint32_t));
2944 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
2945 ALOGV("tuning_vfe_data_size %d",(int)(*(int *)data));
2946 written_len += write(file_fd, data, sizeof(uint32_t));
2947 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
2948 ALOGV("tuning_cpp_data_size %d",(int)(*(int *)data));
2949 written_len += write(file_fd, data, sizeof(uint32_t));
2950 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
2951 ALOGV("tuning_cac_data_size %d",(int)(*(int *)data));
2952 written_len += write(file_fd, data, sizeof(uint32_t));
2953 int total_size = meta.tuning_sensor_data_size;
2954 data = (void *)((uint8_t *)&meta.data);
2955 written_len += write(file_fd, data, total_size);
2956 total_size = meta.tuning_vfe_data_size;
2957 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
2958 written_len += write(file_fd, data, total_size);
2959 total_size = meta.tuning_cpp_data_size;
2960 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
2961 written_len += write(file_fd, data, total_size);
2962 total_size = meta.tuning_cac_data_size;
2963 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
2964 written_len += write(file_fd, data, total_size);
2965 close(file_fd);
2966 }else {
2967 ALOGE("%s: fail t open file for image dumping", __func__);
2968 }
2969 dumpFrameCount++;
2970 }
2971 }
2972 }
2973
2974 /*===========================================================================
2975 * FUNCTION : cleanAndSortStreamInfo
2976 *
2977 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
2978 * and sort them such that raw stream is at the end of the list
2979 * This is a workaround for camera daemon constraint.
2980 *
2981 * PARAMETERS : None
2982 *
2983 *==========================================================================*/
cleanAndSortStreamInfo()2984 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
2985 {
2986 List<stream_info_t *> newStreamInfo;
2987
2988 /*clean up invalid streams*/
2989 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2990 it != mStreamInfo.end();) {
2991 if(((*it)->status) == INVALID){
2992 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
2993 delete channel;
2994 free(*it);
2995 it = mStreamInfo.erase(it);
2996 } else {
2997 it++;
2998 }
2999 }
3000
3001 // Move preview/video/callback/snapshot streams into newList
3002 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3003 it != mStreamInfo.end();) {
3004 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
3005 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
3006 newStreamInfo.push_back(*it);
3007 it = mStreamInfo.erase(it);
3008 } else
3009 it++;
3010 }
3011 // Move raw streams into newList
3012 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3013 it != mStreamInfo.end();) {
3014 newStreamInfo.push_back(*it);
3015 it = mStreamInfo.erase(it);
3016 }
3017
3018 mStreamInfo = newStreamInfo;
3019 }
3020
3021 /*===========================================================================
3022 * FUNCTION : extractJpegMetadata
3023 *
3024 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
3025 * JPEG metadata is cached in HAL, and return as part of capture
3026 * result when metadata is returned from camera daemon.
3027 *
3028 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
3029 * @request: capture request
3030 *
3031 *==========================================================================*/
extractJpegMetadata(CameraMetadata & jpegMetadata,const camera3_capture_request_t * request)3032 void QCamera3HardwareInterface::extractJpegMetadata(
3033 CameraMetadata& jpegMetadata,
3034 const camera3_capture_request_t *request)
3035 {
3036 CameraMetadata frame_settings;
3037 frame_settings = request->settings;
3038
3039 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
3040 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
3041 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
3042 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
3043
3044 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
3045 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
3046 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
3047 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
3048
3049 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
3050 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
3051 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
3052 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
3053
3054 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
3055 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
3056 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
3057 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
3058
3059 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
3060 jpegMetadata.update(ANDROID_JPEG_QUALITY,
3061 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
3062 frame_settings.find(ANDROID_JPEG_QUALITY).count);
3063
3064 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
3065 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
3066 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
3067 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
3068
3069 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE))
3070 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
3071 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32,
3072 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
3073 }
3074
3075 /*===========================================================================
3076 * FUNCTION : convertToRegions
3077 *
3078 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
3079 *
3080 * PARAMETERS :
3081 * @rect : cam_rect_t struct to convert
3082 * @region : int32_t destination array
3083 * @weight : if we are converting from cam_area_t, weight is valid
3084 * else weight = -1
3085 *
3086 *==========================================================================*/
convertToRegions(cam_rect_t rect,int32_t * region,int weight)3087 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
3088 region[0] = rect.left;
3089 region[1] = rect.top;
3090 region[2] = rect.left + rect.width;
3091 region[3] = rect.top + rect.height;
3092 if (weight > -1) {
3093 region[4] = weight;
3094 }
3095 }
3096
/*===========================================================================
 * FUNCTION   : convertFromRegions
 *
 * DESCRIPTION: helper method to convert a metadata region entry
 *              ([x_min, y_min, x_max, y_max, weight]) into cam_area_t
 *
 * PARAMETERS :
 *   @roi      : cam_area_t destination struct
 *   @settings : capture settings containing the region entry
 *   @tag      : metadata tag identifying the region entry to convert
 *
 *==========================================================================*/
convertFromRegions(cam_area_t * roi,const camera_metadata_t * settings,uint32_t tag)3109 void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
3110 const camera_metadata_t *settings,
3111 uint32_t tag){
3112 CameraMetadata frame_settings;
3113 frame_settings = settings;
3114 int32_t x_min = frame_settings.find(tag).data.i32[0];
3115 int32_t y_min = frame_settings.find(tag).data.i32[1];
3116 int32_t x_max = frame_settings.find(tag).data.i32[2];
3117 int32_t y_max = frame_settings.find(tag).data.i32[3];
3118 roi->weight = frame_settings.find(tag).data.i32[4];
3119 roi->rect.left = x_min;
3120 roi->rect.top = y_min;
3121 roi->rect.width = x_max - x_min;
3122 roi->rect.height = y_max - y_min;
3123 }
3124
3125 /*===========================================================================
3126 * FUNCTION : resetIfNeededROI
3127 *
3128 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
3129 * crop region
3130 *
3131 * PARAMETERS :
3132 * @roi : cam_area_t struct to resize
3133 * @scalerCropRegion : cam_crop_region_t region to compare against
3134 *
3135 *
3136 *==========================================================================*/
resetIfNeededROI(cam_area_t * roi,const cam_crop_region_t * scalerCropRegion)3137 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
3138 const cam_crop_region_t* scalerCropRegion)
3139 {
3140 int32_t roi_x_max = roi->rect.width + roi->rect.left;
3141 int32_t roi_y_max = roi->rect.height + roi->rect.top;
3142 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
3143 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
3144 if ((roi_x_max < scalerCropRegion->left) ||
3145 (roi_y_max < scalerCropRegion->top) ||
3146 (roi->rect.left > crop_x_max) ||
3147 (roi->rect.top > crop_y_max)){
3148 return false;
3149 }
3150 if (roi->rect.left < scalerCropRegion->left) {
3151 roi->rect.left = scalerCropRegion->left;
3152 }
3153 if (roi->rect.top < scalerCropRegion->top) {
3154 roi->rect.top = scalerCropRegion->top;
3155 }
3156 if (roi_x_max > crop_x_max) {
3157 roi_x_max = crop_x_max;
3158 }
3159 if (roi_y_max > crop_y_max) {
3160 roi_y_max = crop_y_max;
3161 }
3162 roi->rect.width = roi_x_max - roi->rect.left;
3163 roi->rect.height = roi_y_max - roi->rect.top;
3164 return true;
3165 }
3166
/*===========================================================================
 * FUNCTION   : convertLandmarks
 *
 * DESCRIPTION: helper method to extract the landmarks from face detection info
 *
 * PARAMETERS :
 *   @face      : cam_face_detection_info_t struct whose landmarks to extract
 *   @landmarks : int32_t destination array (6 elements)
 *
 *
 *==========================================================================*/
convertLandmarks(cam_face_detection_info_t face,int32_t * landmarks)3178 void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
3179 {
3180 landmarks[0] = face.left_eye_center.x;
3181 landmarks[1] = face.left_eye_center.y;
3182 landmarks[2] = face.right_eye_center.x;
3183 landmarks[3] = face.right_eye_center.y;
3184 landmarks[4] = face.mouth_center.x;
3185 landmarks[5] = face.mouth_center.y;
3186 }
3187
3188 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
3189 /*===========================================================================
3190 * FUNCTION : initCapabilities
3191 *
3192 * DESCRIPTION: initialize camera capabilities in static data struct
3193 *
3194 * PARAMETERS :
3195 * @cameraId : camera Id
3196 *
3197 * RETURN : int32_t type of status
3198 * NO_ERROR -- success
3199 * none-zero failure code
3200 *==========================================================================*/
initCapabilities(int cameraId)3201 int QCamera3HardwareInterface::initCapabilities(int cameraId)
3202 {
3203 int rc = 0;
3204 mm_camera_vtbl_t *cameraHandle = NULL;
3205 QCamera3HeapMemory *capabilityHeap = NULL;
3206
3207 cameraHandle = camera_open(cameraId);
3208 if (!cameraHandle) {
3209 ALOGE("%s: camera_open failed", __func__);
3210 rc = -1;
3211 goto open_failed;
3212 }
3213
3214 capabilityHeap = new QCamera3HeapMemory();
3215 if (capabilityHeap == NULL) {
3216 ALOGE("%s: creation of capabilityHeap failed", __func__);
3217 goto heap_creation_failed;
3218 }
3219 /* Allocate memory for capability buffer */
3220 rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
3221 if(rc != OK) {
3222 ALOGE("%s: No memory for cappability", __func__);
3223 goto allocate_failed;
3224 }
3225
3226 /* Map memory for capability buffer */
3227 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
3228 rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
3229 CAM_MAPPING_BUF_TYPE_CAPABILITY,
3230 capabilityHeap->getFd(0),
3231 sizeof(cam_capability_t));
3232 if(rc < 0) {
3233 ALOGE("%s: failed to map capability buffer", __func__);
3234 goto map_failed;
3235 }
3236
3237 /* Query Capability */
3238 rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
3239 if(rc < 0) {
3240 ALOGE("%s: failed to query capability",__func__);
3241 goto query_failed;
3242 }
3243 gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
3244 if (!gCamCapability[cameraId]) {
3245 ALOGE("%s: out of memory", __func__);
3246 goto query_failed;
3247 }
3248 memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
3249 sizeof(cam_capability_t));
3250 rc = 0;
3251
3252 query_failed:
3253 cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
3254 CAM_MAPPING_BUF_TYPE_CAPABILITY);
3255 map_failed:
3256 capabilityHeap->deallocate();
3257 allocate_failed:
3258 delete capabilityHeap;
3259 heap_creation_failed:
3260 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
3261 cameraHandle = NULL;
3262 open_failed:
3263 return rc;
3264 }
3265
3266 /*===========================================================================
3267 * FUNCTION : initParameters
3268 *
3269 * DESCRIPTION: initialize camera parameters
3270 *
3271 * PARAMETERS :
3272 *
3273 * RETURN : int32_t type of status
3274 * NO_ERROR -- success
3275 * none-zero failure code
3276 *==========================================================================*/
initParameters()3277 int QCamera3HardwareInterface::initParameters()
3278 {
3279 int rc = 0;
3280
3281 //Allocate Set Param Buffer
3282 mParamHeap = new QCamera3HeapMemory();
3283 rc = mParamHeap->allocate(1, sizeof(metadata_buffer_t), false);
3284 if(rc != OK) {
3285 rc = NO_MEMORY;
3286 ALOGE("Failed to allocate SETPARM Heap memory");
3287 delete mParamHeap;
3288 mParamHeap = NULL;
3289 return rc;
3290 }
3291
3292 //Map memory for parameters buffer
3293 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
3294 CAM_MAPPING_BUF_TYPE_PARM_BUF,
3295 mParamHeap->getFd(0),
3296 sizeof(metadata_buffer_t));
3297 if(rc < 0) {
3298 ALOGE("%s:failed to map SETPARM buffer",__func__);
3299 rc = FAILED_TRANSACTION;
3300 mParamHeap->deallocate();
3301 delete mParamHeap;
3302 mParamHeap = NULL;
3303 return rc;
3304 }
3305
3306 mParameters = (metadata_buffer_t*) DATA_PTR(mParamHeap,0);
3307
3308 mPrevParameters = (metadata_buffer_t*)malloc(sizeof(metadata_buffer_t));
3309 return rc;
3310 }
3311
3312 /*===========================================================================
3313 * FUNCTION : deinitParameters
3314 *
3315 * DESCRIPTION: de-initialize camera parameters
3316 *
3317 * PARAMETERS :
3318 *
3319 * RETURN : NONE
3320 *==========================================================================*/
deinitParameters()3321 void QCamera3HardwareInterface::deinitParameters()
3322 {
3323 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
3324 CAM_MAPPING_BUF_TYPE_PARM_BUF);
3325
3326 mParamHeap->deallocate();
3327 delete mParamHeap;
3328 mParamHeap = NULL;
3329
3330 mParameters = NULL;
3331
3332 free(mPrevParameters);
3333 mPrevParameters = NULL;
3334 }
3335
3336 /*===========================================================================
3337 * FUNCTION : calcMaxJpegSize
3338 *
3339 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
3340 *
3341 * PARAMETERS :
3342 *
3343 * RETURN : max_jpeg_size
3344 *==========================================================================*/
calcMaxJpegSize()3345 int QCamera3HardwareInterface::calcMaxJpegSize()
3346 {
3347 int32_t max_jpeg_size = 0;
3348 int temp_width, temp_height;
3349 for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
3350 temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
3351 temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
3352 if (temp_width * temp_height > max_jpeg_size ) {
3353 max_jpeg_size = temp_width * temp_height;
3354 }
3355 }
3356 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
3357 return max_jpeg_size;
3358 }
3359
3360 /*===========================================================================
3361 * FUNCTION : initStaticMetadata
3362 *
3363 * DESCRIPTION: initialize the static metadata
3364 *
3365 * PARAMETERS :
3366 * @cameraId : camera Id
3367 *
3368 * RETURN : int32_t type of status
3369 * 0 -- success
3370 * non-zero failure code
3371 *==========================================================================*/
initStaticMetadata(int cameraId)3372 int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
3373 {
3374 int rc = 0;
3375 CameraMetadata staticInfo;
3376
3377 int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
3378
3379 /* android.info: hardware level */
3380 uint8_t supportedHardwareLevel = (facingBack)? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL:
3381 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
3382 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
3383 &supportedHardwareLevel, 1);
3384 /*HAL 3 only*/
3385 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
3386 &gCamCapability[cameraId]->min_focus_distance, 1);
3387
3388 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
3389 &gCamCapability[cameraId]->hyper_focal_distance, 1);
3390
3391 /*should be using focal lengths but sensor doesn't provide that info now*/
3392 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
3393 &gCamCapability[cameraId]->focal_length,
3394 1);
3395
3396 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
3397 gCamCapability[cameraId]->apertures,
3398 gCamCapability[cameraId]->apertures_count);
3399
3400 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
3401 gCamCapability[cameraId]->filter_densities,
3402 gCamCapability[cameraId]->filter_densities_count);
3403
3404
3405 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
3406 (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
3407 gCamCapability[cameraId]->optical_stab_modes_count);
3408
3409 staticInfo.update(ANDROID_LENS_POSITION,
3410 gCamCapability[cameraId]->lens_position,
3411 sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
3412
3413 int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
3414 gCamCapability[cameraId]->lens_shading_map_size.height};
3415 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
3416 lens_shading_map_size,
3417 sizeof(lens_shading_map_size)/sizeof(int32_t));
3418
3419 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
3420 gCamCapability[cameraId]->sensor_physical_size, 2);
3421
3422 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
3423 gCamCapability[cameraId]->exposure_time_range, 2);
3424
3425 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
3426 &gCamCapability[cameraId]->max_frame_duration, 1);
3427
3428 camera_metadata_rational baseGainFactor = {
3429 gCamCapability[cameraId]->base_gain_factor.numerator,
3430 gCamCapability[cameraId]->base_gain_factor.denominator};
3431 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
3432 &baseGainFactor, 1);
3433
3434 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
3435 (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
3436
3437 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
3438 gCamCapability[cameraId]->pixel_array_size.height};
3439 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
3440 pixel_array_size, 2);
3441
3442 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
3443 gCamCapability[cameraId]->active_array_size.top,
3444 gCamCapability[cameraId]->active_array_size.width,
3445 gCamCapability[cameraId]->active_array_size.height};
3446 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
3447 active_array_size, 4);
3448
3449 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
3450 &gCamCapability[cameraId]->white_level, 1);
3451
3452 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
3453 gCamCapability[cameraId]->black_level_pattern, 4);
3454
3455 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
3456 &gCamCapability[cameraId]->flash_charge_duration, 1);
3457
3458 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
3459 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
3460
3461 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
3462 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
3463 (int32_t*)&maxFaces, 1);
3464
3465 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
3466 &gCamCapability[cameraId]->histogram_size, 1);
3467
3468 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
3469 &gCamCapability[cameraId]->max_histogram_count, 1);
3470
3471 int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
3472 gCamCapability[cameraId]->sharpness_map_size.height};
3473
3474 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
3475 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
3476
3477 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
3478 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
3479
3480 int32_t scalar_formats[] = {
3481 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
3482 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
3483 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
3484 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
3485 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
3486 int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
3487 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
3488 scalar_formats,
3489 scalar_formats_count);
3490
3491 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
3492 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
3493 gCamCapability[cameraId]->picture_sizes_tbl_cnt,
3494 available_processed_sizes);
3495 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
3496 available_processed_sizes,
3497 (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
3498
3499 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
3500 makeTable(gCamCapability[cameraId]->raw_dim,
3501 gCamCapability[cameraId]->supported_raw_dim_cnt,
3502 available_raw_sizes);
3503 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
3504 available_raw_sizes,
3505 gCamCapability[cameraId]->supported_raw_dim_cnt * 2);
3506
3507 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
3508 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
3509 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
3510 available_fps_ranges);
3511 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
3512 available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
3513
3514 camera_metadata_rational exposureCompensationStep = {
3515 gCamCapability[cameraId]->exp_compensation_step.numerator,
3516 gCamCapability[cameraId]->exp_compensation_step.denominator};
3517 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
3518 &exposureCompensationStep, 1);
3519
3520 /*TO DO*/
3521 uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
3522 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
3523 availableVstabModes, sizeof(availableVstabModes));
3524
3525 /*HAL 1 and HAL 3 common*/
3526 float maxZoom = 4;
3527 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
3528 &maxZoom, 1);
3529
3530 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
3531 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
3532
3533 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
3534 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
3535 max3aRegions[2] = 0; /* AF not supported */
3536 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
3537 max3aRegions, 3);
3538
3539 uint8_t availableFaceDetectModes[] = {
3540 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
3541 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
3542 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
3543 availableFaceDetectModes,
3544 sizeof(availableFaceDetectModes));
3545
3546 int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
3547 gCamCapability[cameraId]->exposure_compensation_max};
3548 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
3549 exposureCompensationRange,
3550 sizeof(exposureCompensationRange)/sizeof(int32_t));
3551
3552 uint8_t lensFacing = (facingBack) ?
3553 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
3554 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
3555
3556 staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
3557 available_processed_sizes,
3558 (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
3559
3560 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
3561 available_thumbnail_sizes,
3562 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
3563
3564 /*android.scaler.availableStreamConfigurations*/
3565 int32_t max_stream_configs_size =
3566 gCamCapability[cameraId]->picture_sizes_tbl_cnt *
3567 sizeof(scalar_formats)/sizeof(int32_t) * 4;
3568 int32_t available_stream_configs[max_stream_configs_size];
3569 int idx = 0;
3570 for (int j = 0; j < scalar_formats_count; j++) {
3571 switch (scalar_formats[j]) {
3572 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
3573 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
3574 for (int i = 0;
3575 i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
3576 available_stream_configs[idx] = scalar_formats[j];
3577 available_stream_configs[idx+1] =
3578 gCamCapability[cameraId]->raw_dim[i].width;
3579 available_stream_configs[idx+2] =
3580 gCamCapability[cameraId]->raw_dim[i].height;
3581 available_stream_configs[idx+3] =
3582 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
3583 idx+=4;
3584 }
3585 break;
3586 default:
3587 for (int i = 0;
3588 i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3589 available_stream_configs[idx] = scalar_formats[j];
3590 available_stream_configs[idx+1] =
3591 gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3592 available_stream_configs[idx+2] =
3593 gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3594 available_stream_configs[idx+3] =
3595 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
3596 idx+=4;
3597 }
3598
3599
3600 break;
3601 }
3602 }
3603 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
3604 available_stream_configs, idx);
3605 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
3606 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
3607
3608 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
3609 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
3610
3611 /* android.scaler.availableMinFrameDurations */
3612 int64_t available_min_durations[max_stream_configs_size];
3613 idx = 0;
3614 for (int j = 0; j < scalar_formats_count; j++) {
3615 switch (scalar_formats[j]) {
3616 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
3617 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
3618 for (int i = 0;
3619 i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
3620 available_min_durations[idx] = scalar_formats[j];
3621 available_min_durations[idx+1] =
3622 gCamCapability[cameraId]->raw_dim[i].width;
3623 available_min_durations[idx+2] =
3624 gCamCapability[cameraId]->raw_dim[i].height;
3625 available_min_durations[idx+3] =
3626 gCamCapability[cameraId]->raw_min_duration[i];
3627 idx+=4;
3628 }
3629 break;
3630 default:
3631 for (int i = 0;
3632 i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3633 available_min_durations[idx] = scalar_formats[j];
3634 available_min_durations[idx+1] =
3635 gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3636 available_min_durations[idx+2] =
3637 gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3638 available_min_durations[idx+3] =
3639 gCamCapability[cameraId]->picture_min_duration[i];
3640 idx+=4;
3641 }
3642 break;
3643 }
3644 }
3645 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
3646 &available_min_durations[0], idx);
3647
3648 int32_t max_jpeg_size = 0;
3649 int temp_width, temp_height;
3650 for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
3651 temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
3652 temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
3653 if (temp_width * temp_height > max_jpeg_size ) {
3654 max_jpeg_size = temp_width * temp_height;
3655 }
3656 }
3657 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
3658 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
3659 &max_jpeg_size, 1);
3660
3661 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
3662 size_t size = 0;
3663 for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
3664 int32_t val = lookupFwkName(EFFECT_MODES_MAP,
3665 sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
3666 gCamCapability[cameraId]->supported_effects[i]);
3667 if (val != NAME_NOT_FOUND) {
3668 avail_effects[size] = (uint8_t)val;
3669 size++;
3670 }
3671 }
3672 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
3673 avail_effects,
3674 size);
3675
3676 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
3677 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
3678 int32_t supported_scene_modes_cnt = 0;
3679 for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
3680 int32_t val = lookupFwkName(SCENE_MODES_MAP,
3681 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
3682 gCamCapability[cameraId]->supported_scene_modes[i]);
3683 if (val != NAME_NOT_FOUND) {
3684 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
3685 supported_indexes[supported_scene_modes_cnt] = i;
3686 supported_scene_modes_cnt++;
3687 }
3688 }
3689
3690 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
3691 avail_scene_modes,
3692 supported_scene_modes_cnt);
3693
3694 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
3695 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
3696 supported_scene_modes_cnt,
3697 scene_mode_overrides,
3698 supported_indexes,
3699 cameraId);
3700 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
3701 scene_mode_overrides,
3702 supported_scene_modes_cnt*3);
3703
3704 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
3705 size = 0;
3706 for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
3707 int32_t val = lookupFwkName(ANTIBANDING_MODES_MAP,
3708 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
3709 gCamCapability[cameraId]->supported_antibandings[i]);
3710 if (val != NAME_NOT_FOUND) {
3711 avail_antibanding_modes[size] = (uint8_t)val;
3712 size++;
3713 }
3714
3715 }
3716 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
3717 avail_antibanding_modes,
3718 size);
3719
3720 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
3721 size = 0;
3722 for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
3723 int32_t val = lookupFwkName(FOCUS_MODES_MAP,
3724 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
3725 gCamCapability[cameraId]->supported_focus_modes[i]);
3726 if (val != NAME_NOT_FOUND) {
3727 avail_af_modes[size] = (uint8_t)val;
3728 size++;
3729 }
3730 }
3731 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
3732 avail_af_modes,
3733 size);
3734
3735 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
3736 size = 0;
3737 for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
3738 int32_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
3739 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
3740 gCamCapability[cameraId]->supported_white_balances[i]);
3741 if (val != NAME_NOT_FOUND) {
3742 avail_awb_modes[size] = (uint8_t)val;
3743 size++;
3744 }
3745 }
3746 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
3747 avail_awb_modes,
3748 size);
3749
3750 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
3751 for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
3752 available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
3753
3754 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
3755 available_flash_levels,
3756 gCamCapability[cameraId]->supported_flash_firing_level_cnt);
3757
3758 uint8_t flashAvailable;
3759 if (gCamCapability[cameraId]->flash_available)
3760 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
3761 else
3762 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
3763 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
3764 &flashAvailable, 1);
3765
3766 uint8_t avail_ae_modes[5];
3767 size = 0;
3768 for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
3769 avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
3770 size++;
3771 }
3772 if (flashAvailable) {
3773 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
3774 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
3775 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
3776 }
3777 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
3778 avail_ae_modes,
3779 size);
3780
3781 int32_t sensitivity_range[2];
3782 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
3783 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
3784 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
3785 sensitivity_range,
3786 sizeof(sensitivity_range) / sizeof(int32_t));
3787
3788 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
3789 &gCamCapability[cameraId]->max_analog_sensitivity,
3790 1);
3791
3792 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
3793 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
3794 &sensor_orientation,
3795 1);
3796
3797 int32_t max_output_streams[3] = {1, 3, 1};
3798 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
3799 max_output_streams,
3800 3);
3801
3802 uint8_t avail_leds = 0;
3803 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
3804 &avail_leds, 0);
3805
3806 uint8_t focus_dist_calibrated;
3807 int32_t val = lookupFwkName(FOCUS_CALIBRATION_MAP,
3808 sizeof(FOCUS_CALIBRATION_MAP)/sizeof(FOCUS_CALIBRATION_MAP[0]),
3809 gCamCapability[cameraId]->focus_dist_calibrated);
3810 if (val != NAME_NOT_FOUND) {
3811 focus_dist_calibrated = (uint8_t)val;
3812 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
3813 &focus_dist_calibrated, 1);
3814 }
3815
3816 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
3817 size = 0;
3818 for (int i = 0; i < gCamCapability[cameraId]->supported_test_pattern_modes_cnt;
3819 i++) {
3820 int32_t val = lookupFwkName(TEST_PATTERN_MAP,
3821 sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
3822 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
3823 if (val != NAME_NOT_FOUND) {
3824 avail_testpattern_modes[size] = val;
3825 size++;
3826 }
3827 }
3828 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
3829 avail_testpattern_modes,
3830 size);
3831
3832 uint8_t max_pipeline_depth = kMaxInFlight + EMPTY_PIPELINE_DELAY;
3833 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
3834 &max_pipeline_depth,
3835 1);
3836
3837 int32_t partial_result_count = 2;
3838 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
3839 &partial_result_count,
3840 1);
3841
3842 uint8_t available_capabilities[MAX_AVAILABLE_CAPABILITIES];
3843 uint8_t available_capabilities_count = 0;
3844 available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE;
3845 available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR;
3846 available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING;
3847 if (facingBack) {
3848 available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW;
3849 }
3850 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
3851 available_capabilities,
3852 available_capabilities_count);
3853
3854 int32_t max_input_streams = 0;
3855 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
3856 &max_input_streams,
3857 1);
3858
3859 int32_t io_format_map[] = {};
3860 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
3861 io_format_map, 0);
3862
3863 int32_t max_latency = (facingBack)? ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL:ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
3864 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
3865 &max_latency,
3866 1);
3867
3868 float optical_axis_angle[2];
3869 optical_axis_angle[0] = 0; //need to verify
3870 optical_axis_angle[1] = 0; //need to verify
3871 staticInfo.update(ANDROID_LENS_OPTICAL_AXIS_ANGLE,
3872 optical_axis_angle,
3873 2);
3874
3875 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST};
3876 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
3877 available_hot_pixel_modes,
3878 1);
3879
3880 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
3881 ANDROID_EDGE_MODE_FAST};
3882 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
3883 available_edge_modes,
3884 2);
3885
3886 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
3887 ANDROID_NOISE_REDUCTION_MODE_FAST};
3888 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
3889 available_noise_red_modes,
3890 2);
3891
3892 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
3893 ANDROID_TONEMAP_MODE_FAST};
3894 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
3895 available_tonemap_modes,
3896 2);
3897
3898 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
3899 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
3900 available_hot_pixel_map_modes,
3901 1);
3902
3903 uint8_t fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
3904 sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
3905 gCamCapability[cameraId]->reference_illuminant1);
3906 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1,
3907 &fwkReferenceIlluminant, 1);
3908
3909 fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
3910 sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
3911 gCamCapability[cameraId]->reference_illuminant2);
3912 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
3913 &fwkReferenceIlluminant, 1);
3914
3915 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1,
3916 (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix1,
3917 3*3);
3918
3919 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2,
3920 (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix2,
3921 3*3);
3922
3923 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1,
3924 (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform1,
3925 3*3);
3926
3927 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2,
3928 (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform2,
3929 3*3);
3930
3931 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
3932 (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform1,
3933 3*3);
3934
3935 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2,
3936 (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform2,
3937 3*3);
3938
3939 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
3940 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
3941 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
3942 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
3943 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
3944 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
3945 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
3946 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
3947 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
3948 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
3949 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
3950 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE, ANDROID_EDGE_STRENGTH,
3951 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
3952 ANDROID_JPEG_GPS_COORDINATES,
3953 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
3954 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
3955 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
3956 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
3957 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
3958 ANDROID_NOISE_REDUCTION_STRENGTH, ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
3959 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
3960 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
3961 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
3962 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
3963 ANDROID_SHADING_STRENGTH, ANDROID_STATISTICS_FACE_DETECT_MODE,
3964 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
3965 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
3966 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
3967 ANDROID_BLACK_LEVEL_LOCK };
3968
3969 size_t request_keys_cnt =
3970 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
3971 //NOTE: Please increase available_request_keys array size before
3972 //adding any new entries.
3973 int32_t available_request_keys[request_keys_cnt+1];
3974 memcpy(available_request_keys, request_keys_basic,
3975 sizeof(request_keys_basic));
3976 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
3977 available_request_keys[request_keys_cnt++] =
3978 ANDROID_CONTROL_AF_REGIONS;
3979 }
3980 //NOTE: Please increase available_request_keys array size before
3981 //adding any new entries.
3982 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
3983 available_request_keys, request_keys_cnt);
3984
3985 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
3986 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
3987 ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
3988 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
3989 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
3990 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
3991 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
3992 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
3993 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
3994 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
3995 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
3996 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
3997 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
3998 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
3999 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
4000 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
4001 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
4002 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
4003 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
4004 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
4005 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4006 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_IDS,
4007 ANDROID_STATISTICS_FACE_LANDMARKS, ANDROID_STATISTICS_FACE_RECTANGLES,
4008 ANDROID_STATISTICS_FACE_SCORES,
4009 ANDROID_SENSOR_NOISE_PROFILE,
4010 ANDROID_SENSOR_GREEN_SPLIT};
4011 size_t result_keys_cnt =
4012 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
4013 //NOTE: Please increase available_result_keys array size before
4014 //adding any new entries.
4015 int32_t available_result_keys[result_keys_cnt+1];
4016 memcpy(available_result_keys, result_keys_basic,
4017 sizeof(result_keys_basic));
4018 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
4019 available_result_keys[result_keys_cnt++] =
4020 ANDROID_CONTROL_AF_REGIONS;
4021 }
4022 //NOTE: Please increase available_result_keys array size before
4023 //adding any new entries.
4024
4025 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
4026 available_result_keys, result_keys_cnt);
4027
4028 int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
4029 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
4030 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
4031 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
4032 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
4033 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
4034 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
4035 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
4036 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
4037 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
4038 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
4039 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
4040 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
4041 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
4042 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
4043 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
4044 ANDROID_LENS_FACING, ANDROID_LENS_OPTICAL_AXIS_ANGLE,ANDROID_LENS_POSITION,
4045 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
4046 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
4047 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
4048 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
4049 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
4050 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
4051 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
4052 ANDROID_SCALER_CROPPING_TYPE,
4053 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
4054 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
4055 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
4056 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
4057 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
4058 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
4059 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
4060 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
4061 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
4062 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
4063 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
4064 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
4065 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
4066 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
4067 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
4068 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
4069 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
4070 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
4071 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
4072 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
4073 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
4074 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
4075 ANDROID_TONEMAP_MAX_CURVE_POINTS, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
4076 ANDROID_SYNC_MAX_LATENCY };
4077 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
4078 available_characteristics_keys,
4079 sizeof(available_characteristics_keys)/sizeof(int32_t));
4080
4081 /*available stall durations depend on the hw + sw and will be different for different devices */
4082 /*have to add for raw after implementation*/
4083 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
4084 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
4085
4086 size_t available_stall_size = gCamCapability[cameraId]->picture_sizes_tbl_cnt * 4;
4087 int64_t available_stall_durations[available_stall_size];
4088 idx = 0;
4089 for (uint32_t j = 0; j < stall_formats_count; j++) {
4090 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
4091 for (uint32_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
4092 available_stall_durations[idx] = stall_formats[j];
4093 available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
4094 available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
4095 available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
4096 idx+=4;
4097 }
4098 } else {
4099 for (uint32_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
4100 available_stall_durations[idx] = stall_formats[j];
4101 available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
4102 available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
4103 available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
4104 idx+=4;
4105 }
4106 }
4107 }
4108 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
4109 available_stall_durations,
4110 idx);
4111
4112 uint8_t available_correction_modes[] =
4113 {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF};
4114 staticInfo.update(
4115 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
4116 available_correction_modes,
4117 1);
4118
4119 uint8_t sensor_timestamp_source[] =
4120 {ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN};
4121 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
4122 sensor_timestamp_source,
4123 1);
4124
4125 //QCAMERA3_OPAQUE_RAW
4126 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
4127 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
4128 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
4129 case LEGACY_RAW:
4130 if (gCamCapability[cameraId]->white_level == (1<<8)-1)
4131 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
4132 else if (gCamCapability[cameraId]->white_level == (1<<10)-1)
4133 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
4134 else if (gCamCapability[cameraId]->white_level == (1<<12)-1)
4135 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
4136 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
4137 break;
4138 case MIPI_RAW:
4139 if (gCamCapability[cameraId]->white_level == (1<<8)-1)
4140 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
4141 else if (gCamCapability[cameraId]->white_level == (1<<10)-1)
4142 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
4143 else if (gCamCapability[cameraId]->white_level == (1<<12)-1)
4144 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
4145 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
4146 break;
4147 default:
4148 ALOGE("%s: unknown opaque_raw_format %d", __func__,
4149 gCamCapability[cameraId]->opaque_raw_fmt);
4150 break;
4151 }
4152 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
4153
4154 int32_t strides[3*gCamCapability[cameraId]->supported_raw_dim_cnt];
4155 for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
4156 cam_stream_buf_plane_info_t buf_planes;
4157 strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
4158 strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
4159 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
4160 &gCamCapability[cameraId]->padding_info, &buf_planes);
4161 strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
4162 }
4163 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
4164 3*gCamCapability[cameraId]->supported_raw_dim_cnt);
4165
4166 gStaticMetadata[cameraId] = staticInfo.release();
4167 return rc;
4168 }
4169
4170 /*===========================================================================
4171 * FUNCTION : makeTable
4172 *
4173 * DESCRIPTION: make a table of sizes
4174 *
4175 * PARAMETERS :
4176 *
4177 *
4178 *==========================================================================*/
makeTable(cam_dimension_t * dimTable,uint8_t size,int32_t * sizeTable)4179 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
4180 int32_t* sizeTable)
4181 {
4182 int j = 0;
4183 for (int i = 0; i < size; i++) {
4184 sizeTable[j] = dimTable[i].width;
4185 sizeTable[j+1] = dimTable[i].height;
4186 j+=2;
4187 }
4188 }
4189
4190 /*===========================================================================
4191 * FUNCTION : makeFPSTable
4192 *
4193 * DESCRIPTION: make a table of fps ranges
4194 *
4195 * PARAMETERS :
4196 *
4197 *==========================================================================*/
makeFPSTable(cam_fps_range_t * fpsTable,uint8_t size,int32_t * fpsRangesTable)4198 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
4199 int32_t* fpsRangesTable)
4200 {
4201 int j = 0;
4202 for (int i = 0; i < size; i++) {
4203 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
4204 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
4205 j+=2;
4206 }
4207 }
4208
4209 /*===========================================================================
4210 * FUNCTION : makeOverridesList
4211 *
4212 * DESCRIPTION: make a list of scene mode overrides
4213 *
4214 * PARAMETERS :
4215 *
4216 *
4217 *==========================================================================*/
makeOverridesList(cam_scene_mode_overrides_t * overridesTable,uint8_t size,uint8_t * overridesList,uint8_t * supported_indexes,int camera_id)4218 void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
4219 uint8_t size, uint8_t* overridesList,
4220 uint8_t* supported_indexes,
4221 int camera_id)
4222 {
4223 /*daemon will give a list of overrides for all scene modes.
4224 However we should send the fwk only the overrides for the scene modes
4225 supported by the framework*/
4226 int j = 0, index = 0, supt = 0;
4227 uint8_t focus_override;
4228 for (int i = 0; i < size; i++) {
4229 supt = 0;
4230 index = supported_indexes[i];
4231 overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
4232 overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
4233 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
4234 overridesTable[index].awb_mode);
4235 focus_override = (uint8_t)overridesTable[index].af_mode;
4236 for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
4237 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
4238 supt = 1;
4239 break;
4240 }
4241 }
4242 if (supt) {
4243 overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
4244 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
4245 focus_override);
4246 } else {
4247 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
4248 }
4249 j+=3;
4250 }
4251 }
4252
4253 /*===========================================================================
4254 * FUNCTION : getPreviewHalPixelFormat
4255 *
4256 * DESCRIPTION: convert the format to type recognized by framework
4257 *
4258 * PARAMETERS : format : the format from backend
4259 *
4260 ** RETURN : format recognized by framework
4261 *
4262 *==========================================================================*/
getScalarFormat(int32_t format)4263 int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
4264 {
4265 int32_t halPixelFormat;
4266
4267 switch (format) {
4268 case CAM_FORMAT_YUV_420_NV12:
4269 halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
4270 break;
4271 case CAM_FORMAT_YUV_420_NV21:
4272 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
4273 break;
4274 case CAM_FORMAT_YUV_420_NV21_ADRENO:
4275 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
4276 break;
4277 case CAM_FORMAT_YUV_420_YV12:
4278 halPixelFormat = HAL_PIXEL_FORMAT_YV12;
4279 break;
4280 case CAM_FORMAT_YUV_422_NV16:
4281 case CAM_FORMAT_YUV_422_NV61:
4282 default:
4283 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
4284 break;
4285 }
4286 return halPixelFormat;
4287 }
4288 /*===========================================================================
4289 * FUNCTION : computeNoiseModelEntryS
4290 *
4291 * DESCRIPTION: function to map a given sensitivity to the S noise
4292 * model parameters in the DNG noise model.
4293 *
4294 * PARAMETERS : sens : the sensor sensitivity
4295 *
4296 ** RETURN : S (sensor amplification) noise
4297 *
4298 *==========================================================================*/
4299
computeNoiseModelEntryS(int32_t sens)4300 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
4301 double s = 1.693069e-06 * sens + 3.480007e-05;
4302 return s < 0.0 ? 0.0 : s;
4303 }
4304
4305 /*===========================================================================
4306 * FUNCTION : computeNoiseModelEntryO
4307 *
4308 * DESCRIPTION: function to map a given sensitivity to the O noise
4309 * model parameters in the DNG noise model.
4310 *
4311 * PARAMETERS : sens : the sensor sensitivity
4312 *
4313 ** RETURN : O (sensor readout) noise
4314 *
4315 *==========================================================================*/
4316
computeNoiseModelEntryO(int32_t sens)4317 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
4318 double o = 1.301416e-07 * sens + -2.262256e-04;
4319 return o < 0.0 ? 0.0 : o
4320 ;}
4321
4322 /*===========================================================================
4323 * FUNCTION : getSensorSensitivity
4324 *
4325 * DESCRIPTION: convert iso_mode to an integer value
4326 *
4327 * PARAMETERS : iso_mode : the iso_mode supported by sensor
4328 *
4329 ** RETURN : sensitivity supported by sensor
4330 *
4331 *==========================================================================*/
getSensorSensitivity(int32_t iso_mode)4332 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
4333 {
4334 int32_t sensitivity;
4335
4336 switch (iso_mode) {
4337 case CAM_ISO_MODE_100:
4338 sensitivity = 100;
4339 break;
4340 case CAM_ISO_MODE_200:
4341 sensitivity = 200;
4342 break;
4343 case CAM_ISO_MODE_400:
4344 sensitivity = 400;
4345 break;
4346 case CAM_ISO_MODE_800:
4347 sensitivity = 800;
4348 break;
4349 case CAM_ISO_MODE_1600:
4350 sensitivity = 1600;
4351 break;
4352 default:
4353 sensitivity = -1;
4354 break;
4355 }
4356 return sensitivity;
4357 }
4358
4359 /*===========================================================================
4360 * FUNCTION : AddSetMetaEntryToBatch
4361 *
4362 * DESCRIPTION: add set parameter entry into batch
4363 *
4364 * PARAMETERS :
4365 * @p_table : ptr to parameter buffer
4366 * @paramType : parameter type
4367 * @paramLength : length of parameter value
4368 * @paramValue : ptr to parameter value
4369 *
4370 * RETURN : int32_t type of status
4371 * NO_ERROR -- success
4372 * none-zero failure code
4373 *==========================================================================*/
AddSetMetaEntryToBatch(metadata_buffer_t * p_table,unsigned int paramType,uint32_t paramLength,void * paramValue)4374 int32_t QCamera3HardwareInterface::AddSetMetaEntryToBatch(metadata_buffer_t *p_table,
4375 unsigned int paramType,
4376 uint32_t paramLength,
4377 void *paramValue)
4378 {
4379 int position = paramType;
4380 int current, next;
4381
4382 /*************************************************************************
4383 * Code to take care of linking next flags *
4384 *************************************************************************/
4385 current = GET_FIRST_PARAM_ID(p_table);
4386 if (position == current){
4387 //DO NOTHING
4388 } else if (position < current){
4389 SET_NEXT_PARAM_ID(position, p_table, current);
4390 SET_FIRST_PARAM_ID(p_table, position);
4391 } else {
4392 /* Search for the position in the linked list where we need to slot in*/
4393 while (position > GET_NEXT_PARAM_ID(current, p_table))
4394 current = GET_NEXT_PARAM_ID(current, p_table);
4395
4396 /*If node already exists no need to alter linking*/
4397 if (position != GET_NEXT_PARAM_ID(current, p_table)) {
4398 next = GET_NEXT_PARAM_ID(current, p_table);
4399 SET_NEXT_PARAM_ID(current, p_table, position);
4400 SET_NEXT_PARAM_ID(position, p_table, next);
4401 }
4402 }
4403
4404 /*************************************************************************
4405 * Copy contents into entry *
4406 *************************************************************************/
4407
4408 if (paramLength > sizeof(parm_type_t)) {
4409 ALOGE("%s:Size of input larger than max entry size",__func__);
4410 return BAD_VALUE;
4411 }
4412 memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
4413 SET_PARM_VALID_BIT(paramType,p_table,1);
4414 return NO_ERROR;
4415 }
4416
4417 /*===========================================================================
4418 * FUNCTION : lookupFwkName
4419 *
4420 * DESCRIPTION: In case the enum is not same in fwk and backend
4421 * make sure the parameter is correctly propogated
4422 *
4423 * PARAMETERS :
4424 * @arr : map between the two enums
4425 * @len : len of the map
4426 * @hal_name : name of the hal_parm to map
4427 *
4428 * RETURN : int type of status
4429 * fwk_name -- success
4430 * none-zero failure code
4431 *==========================================================================*/
lookupFwkName(const QCameraMap arr[],int len,int hal_name)4432 int32_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
4433 int len, int hal_name)
4434 {
4435
4436 for (int i = 0; i < len; i++) {
4437 if (arr[i].hal_name == hal_name)
4438 return arr[i].fwk_name;
4439 }
4440
4441 /* Not able to find matching framework type is not necessarily
4442 * an error case. This happens when mm-camera supports more attributes
4443 * than the frameworks do */
4444 ALOGD("%s: Cannot find matching framework type", __func__);
4445 return NAME_NOT_FOUND;
4446 }
4447
4448 /*===========================================================================
4449 * FUNCTION : lookupHalName
4450 *
4451 * DESCRIPTION: In case the enum is not same in fwk and backend
4452 * make sure the parameter is correctly propogated
4453 *
4454 * PARAMETERS :
4455 * @arr : map between the two enums
4456 * @len : len of the map
4457 * @fwk_name : name of the hal_parm to map
4458 *
4459 * RETURN : int32_t type of status
4460 * hal_name -- success
4461 * none-zero failure code
4462 *==========================================================================*/
lookupHalName(const QCameraMap arr[],int len,unsigned int fwk_name)4463 int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
4464 int len, unsigned int fwk_name)
4465 {
4466 for (int i = 0; i < len; i++) {
4467 if (arr[i].fwk_name == fwk_name)
4468 return arr[i].hal_name;
4469 }
4470 ALOGE("%s: Cannot find matching hal type", __func__);
4471 return NAME_NOT_FOUND;
4472 }
4473
4474 /*===========================================================================
4475 * FUNCTION : getCapabilities
4476 *
4477 * DESCRIPTION: query camera capabilities
4478 *
4479 * PARAMETERS :
4480 * @cameraId : camera Id
4481 * @info : camera info struct to be filled in with camera capabilities
4482 *
4483 * RETURN : int32_t type of status
4484 * NO_ERROR -- success
4485 * none-zero failure code
4486 *==========================================================================*/
getCamInfo(int cameraId,struct camera_info * info)4487 int QCamera3HardwareInterface::getCamInfo(int cameraId,
4488 struct camera_info *info)
4489 {
4490 int rc = 0;
4491
4492 if (NULL == gCamCapability[cameraId]) {
4493 rc = initCapabilities(cameraId);
4494 if (rc < 0) {
4495 //pthread_mutex_unlock(&g_camlock);
4496 return rc;
4497 }
4498 }
4499
4500 if (NULL == gStaticMetadata[cameraId]) {
4501 rc = initStaticMetadata(cameraId);
4502 if (rc < 0) {
4503 return rc;
4504 }
4505 }
4506
4507 switch(gCamCapability[cameraId]->position) {
4508 case CAM_POSITION_BACK:
4509 info->facing = CAMERA_FACING_BACK;
4510 break;
4511
4512 case CAM_POSITION_FRONT:
4513 info->facing = CAMERA_FACING_FRONT;
4514 break;
4515
4516 default:
4517 ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
4518 rc = -1;
4519 break;
4520 }
4521
4522
4523 info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
4524 info->device_version = CAMERA_DEVICE_API_VERSION_3_2;
4525 info->static_camera_characteristics = gStaticMetadata[cameraId];
4526
4527 return rc;
4528 }
4529
4530 /*===========================================================================
4531 * FUNCTION : translateCapabilityToMetadata
4532 *
4533 * DESCRIPTION: translate the capability into camera_metadata_t
4534 *
4535 * PARAMETERS : type of the request
4536 *
4537 *
4538 * RETURN : success: camera_metadata_t*
4539 * failure: NULL
4540 *
4541 *==========================================================================*/
translateCapabilityToMetadata(int type)4542 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
4543 {
4544 pthread_mutex_lock(&mMutex);
4545
4546 if (mDefaultMetadata[type] != NULL) {
4547 pthread_mutex_unlock(&mMutex);
4548 return mDefaultMetadata[type];
4549 }
4550 //first time we are handling this request
4551 //fill up the metadata structure using the wrapper class
4552 CameraMetadata settings;
4553 //translate from cam_capability_t to camera_metadata_tag_t
4554 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
4555 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
4556 int32_t defaultRequestID = 0;
4557 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
4558
4559 uint8_t controlIntent = 0;
4560 uint8_t focusMode;
4561 switch (type) {
4562 case CAMERA3_TEMPLATE_PREVIEW:
4563 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
4564 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
4565 break;
4566 case CAMERA3_TEMPLATE_STILL_CAPTURE:
4567 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
4568 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
4569 break;
4570 case CAMERA3_TEMPLATE_VIDEO_RECORD:
4571 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
4572 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
4573 break;
4574 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
4575 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
4576 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
4577 break;
4578 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
4579 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
4580 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
4581 break;
4582 case CAMERA3_TEMPLATE_MANUAL:
4583 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
4584 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
4585 break;
4586 default:
4587 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
4588 break;
4589 }
4590 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
4591
4592 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
4593 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
4594 }
4595 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
4596
4597 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
4598 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
4599
4600 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
4601 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4602
4603 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
4604 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
4605
4606 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
4607 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
4608
4609 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
4610 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
4611
4612 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
4613 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
4614
4615 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
4616 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
4617
4618 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
4619 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
4620
4621 /*flash*/
4622 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
4623 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
4624
4625 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
4626 settings.update(ANDROID_FLASH_FIRING_POWER,
4627 &flashFiringLevel, 1);
4628
4629 /* lens */
4630 float default_aperture = gCamCapability[mCameraId]->apertures[0];
4631 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
4632
4633 if (gCamCapability[mCameraId]->filter_densities_count) {
4634 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
4635 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
4636 gCamCapability[mCameraId]->filter_densities_count);
4637 }
4638
4639 float default_focal_length = gCamCapability[mCameraId]->focal_length;
4640 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
4641
4642 float default_focus_distance = 0;
4643 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
4644
4645 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
4646 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
4647
4648 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
4649 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
4650
4651 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
4652 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
4653
4654 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_FULL;
4655 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
4656
4657 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
4658 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
4659
4660 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
4661 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
4662
4663 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4664 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4665
4666 /* Lens shading map mode */
4667 uint8_t shadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
4668 if (type == CAMERA3_TEMPLATE_STILL_CAPTURE &&
4669 gCamCapability[mCameraId]->supported_raw_dim_cnt) {
4670 shadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
4671 }
4672 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingMapMode, 1);
4673
4674 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
4675 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
4676
4677 /* Exposure time(Update the Min Exposure Time)*/
4678 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
4679 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
4680
4681 /* frame duration */
4682 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
4683 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
4684
4685 /* sensitivity */
4686 static const int32_t default_sensitivity = 100;
4687 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
4688
4689 /*edge mode*/
4690 static const uint8_t edge_mode = ANDROID_EDGE_MODE_FAST;
4691 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
4692
4693 /*noise reduction mode*/
4694 static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
4695 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
4696
4697 /*color correction mode*/
4698 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
4699 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
4700
4701 /*transform matrix mode*/
4702 static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
4703 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
4704
4705 uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
4706 settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
4707
4708 int32_t scaler_crop_region[4];
4709 scaler_crop_region[0] = 0;
4710 scaler_crop_region[1] = 0;
4711 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
4712 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
4713 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
4714
4715 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
4716 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
4717
4718 static const uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
4719 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
4720
4721 uint8_t opt_stab_mode = (gCamCapability[mCameraId]->optical_stab_modes_count == 2)?
4722 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON :
4723 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
4724 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opt_stab_mode, 1);
4725
4726 /*focus distance*/
4727 float focus_distance = 0.0;
4728 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
4729
4730 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
4731 float max_range = 0.0;
4732 float max_fixed_fps = 0.0;
4733 int32_t fps_range[2] = {0, 0};
4734 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
4735 i++) {
4736 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
4737 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
4738 if (type == CAMERA3_TEMPLATE_PREVIEW ||
4739 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
4740 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
4741 if (range > max_range) {
4742 fps_range[0] =
4743 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
4744 fps_range[1] =
4745 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
4746 max_range = range;
4747 }
4748 } else {
4749 if (range < 0.01 && max_fixed_fps <
4750 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
4751 fps_range[0] =
4752 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
4753 fps_range[1] =
4754 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
4755 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
4756 }
4757 }
4758 }
4759 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
4760
4761 /*precapture trigger*/
4762 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
4763 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
4764
4765 /*af trigger*/
4766 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
4767 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
4768
4769 /* ae & af regions */
4770 int32_t active_region[] = {
4771 gCamCapability[mCameraId]->active_array_size.left,
4772 gCamCapability[mCameraId]->active_array_size.top,
4773 gCamCapability[mCameraId]->active_array_size.left +
4774 gCamCapability[mCameraId]->active_array_size.width,
4775 gCamCapability[mCameraId]->active_array_size.top +
4776 gCamCapability[mCameraId]->active_array_size.height,
4777 0};
4778 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region, 5);
4779 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region, 5);
4780
4781 /* black level lock */
4782 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
4783 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
4784
4785 /* face detect mode */
4786 uint8_t facedetect_mode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
4787 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &facedetect_mode, 1);
4788
4789 //special defaults for manual template
4790 if (type == CAMERA3_TEMPLATE_MANUAL) {
4791 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
4792 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
4793
4794 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
4795 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
4796
4797 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
4798 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
4799
4800 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
4801 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
4802
4803 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
4804 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
4805
4806 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
4807 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
4808 }
4809 mDefaultMetadata[type] = settings.release();
4810
4811 pthread_mutex_unlock(&mMutex);
4812 return mDefaultMetadata[type];
4813 }
4814
4815 /*===========================================================================
4816 * FUNCTION : setFrameParameters
4817 *
4818 * DESCRIPTION: set parameters per frame as requested in the metadata from
4819 * framework
4820 *
4821 * PARAMETERS :
4822 * @request : request that needs to be serviced
4823 * @streamID : Stream ID of all the requested streams
4824 *
4825 * RETURN : success: NO_ERROR
4826 * failure:
4827 *==========================================================================*/
setFrameParameters(camera3_capture_request_t * request,cam_stream_ID_t streamID)4828 int QCamera3HardwareInterface::setFrameParameters(
4829 camera3_capture_request_t *request,
4830 cam_stream_ID_t streamID)
4831 {
4832 /*translate from camera_metadata_t type to parm_type_t*/
4833 int rc = 0;
4834 int32_t hal_version = CAM_HAL_V3;
4835 if (mRepeatingRequest == true) {
4836 //chain of repeating request
4837 ALOGV("%s: chain of repeating request", __func__);
4838 } else {
4839 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
4840 }
4841
4842 memset(mParameters, 0, sizeof(metadata_buffer_t));
4843 mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
4844 rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
4845 sizeof(hal_version), &hal_version);
4846 if (rc < 0) {
4847 ALOGE("%s: Failed to set hal version in the parameters", __func__);
4848 return BAD_VALUE;
4849 }
4850
4851 /*we need to update the frame number in the parameters*/
4852 rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
4853 sizeof(request->frame_number), &(request->frame_number));
4854 if (rc < 0) {
4855 ALOGE("%s: Failed to set the frame number in the parameters", __func__);
4856 return BAD_VALUE;
4857 }
4858
4859 /* Update stream id of all the requested buffers */
4860 rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_STREAM_ID,
4861 sizeof(cam_stream_ID_t), &streamID);
4862
4863 if (rc < 0) {
4864 ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
4865 return BAD_VALUE;
4866 }
4867
4868 if(request->settings != NULL){
4869 mRepeatingRequest = false;
4870 rc = translateToHalMetadata(request, mParameters);
4871 } else {
4872 mRepeatingRequest = true;
4873 }
4874
4875 return rc;
4876 }
4877
4878 /*===========================================================================
4879 * FUNCTION : setReprocParameters
4880 *
4881 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
4882 * queue it to picture channel for reprocessing.
4883 *
4884 * PARAMETERS :
4885 * @request : request that needs to be serviced
4886 *
4887 * RETURN : success: NO_ERROR
4888 * failure: non zero failure code
4889 *==========================================================================*/
setReprocParameters(camera3_capture_request_t * request)4890 int QCamera3HardwareInterface::setReprocParameters(
4891 camera3_capture_request_t *request)
4892 {
4893 /*translate from camera_metadata_t type to parm_type_t*/
4894 int rc = 0;
4895 metadata_buffer_t *reprocParam = NULL;
4896
4897 if(request->settings != NULL){
4898 ALOGE("%s: Reprocess settings cannot be NULL", __func__);
4899 return BAD_VALUE;
4900 }
4901 reprocParam = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
4902 if (!reprocParam) {
4903 ALOGE("%s: Failed to allocate reprocessing metadata buffer", __func__);
4904 return NO_MEMORY;
4905 }
4906 memset(reprocParam, 0, sizeof(metadata_buffer_t));
4907 reprocParam->first_flagged_entry = CAM_INTF_PARM_MAX;
4908
4909 /*we need to update the frame number in the parameters*/
4910 rc = AddSetMetaEntryToBatch(reprocParam, CAM_INTF_META_FRAME_NUMBER,
4911 sizeof(request->frame_number), &(request->frame_number));
4912 if (rc < 0) {
4913 ALOGE("%s: Failed to set the frame number in the parameters", __func__);
4914 return BAD_VALUE;
4915 }
4916
4917
4918 rc = translateToHalMetadata(request, reprocParam);
4919 if (rc < 0) {
4920 ALOGE("%s: Failed to translate reproc request", __func__);
4921 delete reprocParam;
4922 return rc;
4923 }
4924 /*queue metadata for reprocessing*/
4925 rc = mPictureChannel->queueReprocMetadata(reprocParam);
4926 if (rc < 0) {
4927 ALOGE("%s: Failed to queue reprocessing metadata", __func__);
4928 delete reprocParam;
4929 }
4930 return rc;
4931 }
4932
4933 /*===========================================================================
4934 * FUNCTION : translateToHalMetadata
4935 *
4936 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
4937 *
4938 *
4939 * PARAMETERS :
4940 * @request : request sent from framework
4941 *
4942 *
4943 * RETURN : success: NO_ERROR
4944 * failure:
4945 *==========================================================================*/
translateToHalMetadata(const camera3_capture_request_t * request,metadata_buffer_t * hal_metadata)4946 int QCamera3HardwareInterface::translateToHalMetadata
4947 (const camera3_capture_request_t *request,
4948 metadata_buffer_t *hal_metadata)
4949 {
4950 int rc = 0;
4951 CameraMetadata frame_settings;
4952 frame_settings = request->settings;
4953
4954 /* Do not change the order of the following list unless you know what you are
4955 * doing.
4956 * The order is laid out in such a way that parameters in the front of the table
4957 * may be used to override the parameters later in the table. Examples are:
4958 * 1. META_MODE should precede AEC/AWB/AF MODE
4959 * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
4960 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
4961 * 4. Any mode should precede it's corresponding settings
4962 */
4963 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4964 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4965 rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_MODE,
4966 sizeof(metaMode), &metaMode);
4967 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4968 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4969 uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
4970 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
4971 fwk_sceneMode);
4972 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
4973 sizeof(sceneMode), &sceneMode);
4974 } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
4975 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
4976 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
4977 sizeof(sceneMode), &sceneMode);
4978 } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
4979 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
4980 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
4981 sizeof(sceneMode), &sceneMode);
4982 }
4983 }
4984
4985 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
4986 uint8_t fwk_aeMode =
4987 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
4988 uint8_t aeMode;
4989 int32_t redeye;
4990
4991 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
4992 aeMode = CAM_AE_MODE_OFF;
4993 } else {
4994 aeMode = CAM_AE_MODE_ON;
4995 }
4996 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
4997 redeye = 1;
4998 } else {
4999 redeye = 0;
5000 }
5001
5002 int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
5003 sizeof(AE_FLASH_MODE_MAP),
5004 fwk_aeMode);
5005 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AEC_MODE,
5006 sizeof(aeMode), &aeMode);
5007 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
5008 sizeof(flashMode), &flashMode);
5009 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION,
5010 sizeof(redeye), &redeye);
5011 }
5012
5013 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
5014 uint8_t fwk_whiteLevel =
5015 frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
5016 uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
5017 sizeof(WHITE_BALANCE_MODES_MAP),
5018 fwk_whiteLevel);
5019 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE,
5020 sizeof(whiteLevel), &whiteLevel);
5021 }
5022
5023 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
5024 uint8_t fwk_focusMode =
5025 frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
5026 uint8_t focusMode;
5027 focusMode = lookupHalName(FOCUS_MODES_MAP,
5028 sizeof(FOCUS_MODES_MAP),
5029 fwk_focusMode);
5030 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_FOCUS_MODE,
5031 sizeof(focusMode), &focusMode);
5032 }
5033
5034 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
5035 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
5036 rc = AddSetMetaEntryToBatch(hal_metadata,
5037 CAM_INTF_META_LENS_FOCUS_DISTANCE,
5038 sizeof(focalDistance), &focalDistance);
5039 }
5040
5041 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
5042 uint8_t fwk_antibandingMode =
5043 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
5044 uint8_t hal_antibandingMode = lookupHalName(ANTIBANDING_MODES_MAP,
5045 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
5046 fwk_antibandingMode);
5047 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
5048 sizeof(hal_antibandingMode), &hal_antibandingMode);
5049 }
5050
5051 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
5052 int32_t expCompensation = frame_settings.find(
5053 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
5054 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
5055 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
5056 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
5057 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
5058 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV,
5059 sizeof(expCompensation), &expCompensation);
5060 }
5061
5062 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
5063 int32_t expCompensation = frame_settings.find(
5064 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
5065 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
5066 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
5067 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
5068 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
5069 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV,
5070 sizeof(expCompensation), &expCompensation);
5071 }
5072
5073 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
5074 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
5075 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_AEC_LOCK,
5076 sizeof(aeLock), &aeLock);
5077 }
5078 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5079 cam_fps_range_t fps_range;
5080 fps_range.min_fps =
5081 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
5082 fps_range.max_fps =
5083 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
5084 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_FPS_RANGE,
5085 sizeof(fps_range), &fps_range);
5086 }
5087
5088 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
5089 uint8_t awbLock =
5090 frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
5091 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_AWB_LOCK,
5092 sizeof(awbLock), &awbLock);
5093 }
5094
5095 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
5096 uint8_t fwk_effectMode =
5097 frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
5098 uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
5099 sizeof(EFFECT_MODES_MAP),
5100 fwk_effectMode);
5101 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EFFECT,
5102 sizeof(effectMode), &effectMode);
5103 }
5104
5105 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
5106 uint8_t colorCorrectMode =
5107 frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
5108 rc =
5109 AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
5110 sizeof(colorCorrectMode), &colorCorrectMode);
5111 }
5112
5113 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
5114 cam_color_correct_gains_t colorCorrectGains;
5115 for (int i = 0; i < 4; i++) {
5116 colorCorrectGains.gains[i] =
5117 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
5118 }
5119 rc =
5120 AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
5121 sizeof(colorCorrectGains), &colorCorrectGains);
5122 }
5123
5124 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
5125 cam_color_correct_matrix_t colorCorrectTransform;
5126 cam_rational_type_t transform_elem;
5127 int num = 0;
5128 for (int i = 0; i < 3; i++) {
5129 for (int j = 0; j < 3; j++) {
5130 transform_elem.numerator =
5131 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
5132 transform_elem.denominator =
5133 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
5134 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
5135 num++;
5136 }
5137 }
5138 rc =
5139 AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
5140 sizeof(colorCorrectTransform), &colorCorrectTransform);
5141 }
5142
5143 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)) {
5144 cam_trigger_t aecTrigger;
5145 aecTrigger.trigger =
5146 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
5147 rc = AddSetMetaEntryToBatch(hal_metadata,
5148 CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
5149 sizeof(aecTrigger), &aecTrigger);
5150 }
5151
5152 /*af_trigger must come with a trigger id*/
5153 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER)) {
5154 cam_trigger_t af_trigger;
5155 af_trigger.trigger =
5156 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
5157 rc = AddSetMetaEntryToBatch(hal_metadata,
5158 CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
5159 }
5160
5161 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
5162 int32_t demosaic =
5163 frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
5164 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_DEMOSAIC,
5165 sizeof(demosaic), &demosaic);
5166 }
5167
5168 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
5169 cam_edge_application_t edge_application;
5170 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
5171 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
5172 edge_application.sharpness = 0;
5173 } else {
5174 if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
5175 uint8_t edgeStrength =
5176 frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
5177 edge_application.sharpness = (int32_t)edgeStrength;
5178 } else {
5179 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
5180 }
5181 }
5182 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_EDGE_MODE,
5183 sizeof(edge_application), &edge_application);
5184 }
5185
5186 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
5187 int32_t respectFlashMode = 1;
5188 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
5189 uint8_t fwk_aeMode =
5190 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
5191 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
5192 respectFlashMode = 0;
5193 ALOGV("%s: AE Mode controls flash, ignore android.flash.mode",
5194 __func__);
5195 }
5196 }
5197 if (respectFlashMode) {
5198 uint8_t flashMode =
5199 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
5200 flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
5201 sizeof(FLASH_MODES_MAP),
5202 flashMode);
5203 ALOGV("%s: flash mode after mapping %d", __func__, flashMode);
5204 // To check: CAM_INTF_META_FLASH_MODE usage
5205 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
5206 sizeof(flashMode), &flashMode);
5207 }
5208 }
5209
5210 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
5211 uint8_t flashPower =
5212 frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
5213 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_FLASH_POWER,
5214 sizeof(flashPower), &flashPower);
5215 }
5216
5217 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
5218 int64_t flashFiringTime =
5219 frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
5220 rc = AddSetMetaEntryToBatch(hal_metadata,
5221 CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
5222 }
5223
5224 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
5225 uint8_t hotPixelMode =
5226 frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
5227 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
5228 sizeof(hotPixelMode), &hotPixelMode);
5229 }
5230
5231 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
5232 float lensAperture =
5233 frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
5234 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_APERTURE,
5235 sizeof(lensAperture), &lensAperture);
5236 }
5237
5238 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
5239 float filterDensity =
5240 frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
5241 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
5242 sizeof(filterDensity), &filterDensity);
5243 }
5244
5245 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
5246 float focalLength =
5247 frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
5248 rc = AddSetMetaEntryToBatch(hal_metadata,
5249 CAM_INTF_META_LENS_FOCAL_LENGTH,
5250 sizeof(focalLength), &focalLength);
5251 }
5252
5253 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
5254 uint8_t optStabMode =
5255 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
5256 rc = AddSetMetaEntryToBatch(hal_metadata,
5257 CAM_INTF_META_LENS_OPT_STAB_MODE,
5258 sizeof(optStabMode), &optStabMode);
5259 }
5260
5261 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
5262 uint8_t noiseRedMode =
5263 frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5264 rc = AddSetMetaEntryToBatch(hal_metadata,
5265 CAM_INTF_META_NOISE_REDUCTION_MODE,
5266 sizeof(noiseRedMode), &noiseRedMode);
5267 }
5268
5269 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
5270 uint8_t noiseRedStrength =
5271 frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
5272 rc = AddSetMetaEntryToBatch(hal_metadata,
5273 CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
5274 sizeof(noiseRedStrength), &noiseRedStrength);
5275 }
5276
5277 cam_crop_region_t scalerCropRegion;
5278 bool scalerCropSet = false;
5279 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
5280 scalerCropRegion.left =
5281 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
5282 scalerCropRegion.top =
5283 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
5284 scalerCropRegion.width =
5285 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
5286 scalerCropRegion.height =
5287 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
5288 rc = AddSetMetaEntryToBatch(hal_metadata,
5289 CAM_INTF_META_SCALER_CROP_REGION,
5290 sizeof(scalerCropRegion), &scalerCropRegion);
5291 scalerCropSet = true;
5292 }
5293
5294 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
5295 int64_t sensorExpTime =
5296 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
5297 ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
5298 rc = AddSetMetaEntryToBatch(hal_metadata,
5299 CAM_INTF_META_SENSOR_EXPOSURE_TIME,
5300 sizeof(sensorExpTime), &sensorExpTime);
5301 }
5302
5303 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
5304 int64_t sensorFrameDuration =
5305 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
5306 int64_t minFrameDuration = getMinFrameDuration(request);
5307 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
5308 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
5309 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
5310 ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
5311 rc = AddSetMetaEntryToBatch(hal_metadata,
5312 CAM_INTF_META_SENSOR_FRAME_DURATION,
5313 sizeof(sensorFrameDuration), &sensorFrameDuration);
5314 }
5315
5316 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
5317 int32_t sensorSensitivity =
5318 frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
5319 if (sensorSensitivity <
5320 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
5321 sensorSensitivity =
5322 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
5323 if (sensorSensitivity >
5324 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
5325 sensorSensitivity =
5326 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
5327 ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
5328 rc = AddSetMetaEntryToBatch(hal_metadata,
5329 CAM_INTF_META_SENSOR_SENSITIVITY,
5330 sizeof(sensorSensitivity), &sensorSensitivity);
5331 }
5332
5333 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
5334 int32_t shadingMode =
5335 frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
5336 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_MODE,
5337 sizeof(shadingMode), &shadingMode);
5338 }
5339
5340 if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
5341 uint8_t shadingStrength =
5342 frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
5343 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_STRENGTH,
5344 sizeof(shadingStrength), &shadingStrength);
5345 }
5346
5347 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
5348 uint8_t fwk_facedetectMode =
5349 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
5350 uint8_t facedetectMode =
5351 lookupHalName(FACEDETECT_MODES_MAP,
5352 sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
5353 rc = AddSetMetaEntryToBatch(hal_metadata,
5354 CAM_INTF_META_STATS_FACEDETECT_MODE,
5355 sizeof(facedetectMode), &facedetectMode);
5356 }
5357
5358 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
5359 uint8_t histogramMode =
5360 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
5361 rc = AddSetMetaEntryToBatch(hal_metadata,
5362 CAM_INTF_META_STATS_HISTOGRAM_MODE,
5363 sizeof(histogramMode), &histogramMode);
5364 }
5365
5366 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
5367 uint8_t sharpnessMapMode =
5368 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
5369 rc = AddSetMetaEntryToBatch(hal_metadata,
5370 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
5371 sizeof(sharpnessMapMode), &sharpnessMapMode);
5372 }
5373
5374 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
5375 uint8_t tonemapMode =
5376 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
5377 rc = AddSetMetaEntryToBatch(hal_metadata,
5378 CAM_INTF_META_TONEMAP_MODE,
5379 sizeof(tonemapMode), &tonemapMode);
5380 }
5381 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
5382 /*All tonemap channels will have the same number of points*/
5383 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
5384 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
5385 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
5386 cam_rgb_tonemap_curves tonemapCurves;
5387 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
5388
5389 /* ch0 = G*/
5390 int point = 0;
5391 cam_tonemap_curve_t tonemapCurveGreen;
5392 for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
5393 for (int j = 0; j < 2; j++) {
5394 tonemapCurveGreen.tonemap_points[i][j] =
5395 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
5396 point++;
5397 }
5398 }
5399 tonemapCurves.curves[0] = tonemapCurveGreen;
5400
5401 /* ch 1 = B */
5402 point = 0;
5403 cam_tonemap_curve_t tonemapCurveBlue;
5404 for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
5405 for (int j = 0; j < 2; j++) {
5406 tonemapCurveBlue.tonemap_points[i][j] =
5407 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
5408 point++;
5409 }
5410 }
5411 tonemapCurves.curves[1] = tonemapCurveBlue;
5412
5413 /* ch 2 = R */
5414 point = 0;
5415 cam_tonemap_curve_t tonemapCurveRed;
5416 for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
5417 for (int j = 0; j < 2; j++) {
5418 tonemapCurveRed.tonemap_points[i][j] =
5419 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
5420 point++;
5421 }
5422 }
5423 tonemapCurves.curves[2] = tonemapCurveRed;
5424
5425 rc = AddSetMetaEntryToBatch(hal_metadata,
5426 CAM_INTF_META_TONEMAP_CURVES,
5427 sizeof(tonemapCurves), &tonemapCurves);
5428 }
5429
5430 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5431 uint8_t captureIntent =
5432 frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5433 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
5434 sizeof(captureIntent), &captureIntent);
5435 }
5436
5437 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
5438 uint8_t blackLevelLock =
5439 frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
5440 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
5441 sizeof(blackLevelLock), &blackLevelLock);
5442 }
5443
5444 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5445 uint8_t lensShadingMapMode =
5446 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5447 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5448 sizeof(lensShadingMapMode), &lensShadingMapMode);
5449 }
5450
5451 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
5452 cam_area_t roi;
5453 bool reset = true;
5454 convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
5455 if (scalerCropSet) {
5456 reset = resetIfNeededROI(&roi, &scalerCropRegion);
5457 }
5458 if (reset) {
5459 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AEC_ROI,
5460 sizeof(roi), &roi);
5461 }
5462 }
5463
5464 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
5465 cam_area_t roi;
5466 bool reset = true;
5467 convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
5468 if (scalerCropSet) {
5469 reset = resetIfNeededROI(&roi, &scalerCropRegion);
5470 }
5471 if (reset) {
5472 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AF_ROI,
5473 sizeof(roi), &roi);
5474 }
5475 }
5476
5477 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
5478 cam_test_pattern_data_t testPatternData;
5479 uint32_t fwk_testPatternMode = frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
5480 uint8_t testPatternMode = lookupHalName(TEST_PATTERN_MAP,
5481 sizeof(TEST_PATTERN_MAP), fwk_testPatternMode);
5482
5483 memset(&testPatternData, 0, sizeof(testPatternData));
5484 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
5485 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
5486 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
5487 int32_t* fwk_testPatternData = frame_settings.find(
5488 ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
5489 testPatternData.r = fwk_testPatternData[0];
5490 testPatternData.b = fwk_testPatternData[3];
5491 switch (gCamCapability[mCameraId]->color_arrangement) {
5492 case CAM_FILTER_ARRANGEMENT_RGGB:
5493 case CAM_FILTER_ARRANGEMENT_GRBG:
5494 testPatternData.gr = fwk_testPatternData[1];
5495 testPatternData.gb = fwk_testPatternData[2];
5496 break;
5497 case CAM_FILTER_ARRANGEMENT_GBRG:
5498 case CAM_FILTER_ARRANGEMENT_BGGR:
5499 testPatternData.gr = fwk_testPatternData[2];
5500 testPatternData.gb = fwk_testPatternData[1];
5501 break;
5502 default:
5503 ALOGE("%s: color arrangement %d is not supported", __func__,
5504 gCamCapability[mCameraId]->color_arrangement);
5505 break;
5506 }
5507 }
5508 rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_TEST_PATTERN_DATA,
5509 sizeof(testPatternData), &testPatternData);
5510 }
5511
5512 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
5513 double *gps_coords =
5514 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d;
5515 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES, sizeof(double)*3, gps_coords);
5516 }
5517
5518 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
5519 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
5520 const char *gps_methods_src = (const char *)
5521 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
5522 uint32_t count = frame_settings.find(
5523 ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
5524 memset(gps_methods, 0, sizeof(gps_methods));
5525 strncpy(gps_methods, gps_methods_src, sizeof(gps_methods));
5526 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS, sizeof(gps_methods), gps_methods);
5527 }
5528
5529 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
5530 int64_t gps_timestamp =
5531 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
5532 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP, sizeof(int64_t), &gps_timestamp);
5533 }
5534
5535 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
5536 int32_t orientation =
5537 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
5538 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, sizeof(orientation), &orientation);
5539 }
5540
5541 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
5542 int8_t quality =
5543 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
5544 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_QUALITY, sizeof(quality), &quality);
5545 }
5546
5547 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
5548 int8_t thumb_quality =
5549 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
5550 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY, sizeof(thumb_quality), &thumb_quality);
5551 }
5552
5553 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
5554 cam_dimension_t dim;
5555 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
5556 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
5557 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, sizeof(dim), &dim);
5558 }
5559
5560 // Internal metadata
5561 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
5562 uint8_t* privatedata =
5563 frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS).data.u8;
5564 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
5565 sizeof(uint8_t) * MAX_METADATA_PAYLOAD_SIZE, privatedata);
5566 }
5567
5568 // EV step
5569 rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV_STEP,
5570 sizeof(cam_rational_type_t), &(gCamCapability[mCameraId]->exp_compensation_step));
5571
5572 return rc;
5573 }
5574
5575 /*===========================================================================
5576 * FUNCTION : captureResultCb
5577 *
5578 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
5579 *
5580 * PARAMETERS :
5581 * @frame : frame information from mm-camera-interface
5582 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
5583 * @userdata: userdata
5584 *
5585 * RETURN : NONE
5586 *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata,camera3_stream_buffer_t * buffer,uint32_t frame_number,void * userdata)5587 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
5588 camera3_stream_buffer_t *buffer,
5589 uint32_t frame_number, void *userdata)
5590 {
5591 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
5592 if (hw == NULL) {
5593 ALOGE("%s: Invalid hw %p", __func__, hw);
5594 return;
5595 }
5596
5597 hw->captureResultCb(metadata, buffer, frame_number);
5598 return;
5599 }
5600
5601
5602 /*===========================================================================
5603 * FUNCTION : initialize
5604 *
5605 * DESCRIPTION: Pass framework callback pointers to HAL
5606 *
5607 * PARAMETERS :
5608 *
5609 *
5610 * RETURN : Success : 0
5611 * Failure: -ENODEV
5612 *==========================================================================*/
5613
initialize(const struct camera3_device * device,const camera3_callback_ops_t * callback_ops)5614 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
5615 const camera3_callback_ops_t *callback_ops)
5616 {
5617 ALOGV("%s: E", __func__);
5618 QCamera3HardwareInterface *hw =
5619 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5620 if (!hw) {
5621 ALOGE("%s: NULL camera device", __func__);
5622 return -ENODEV;
5623 }
5624
5625 int rc = hw->initialize(callback_ops);
5626 ALOGV("%s: X", __func__);
5627 return rc;
5628 }
5629
5630 /*===========================================================================
5631 * FUNCTION : configure_streams
5632 *
5633 * DESCRIPTION:
5634 *
5635 * PARAMETERS :
5636 *
5637 *
5638 * RETURN : Success: 0
5639 * Failure: -EINVAL (if stream configuration is invalid)
5640 * -ENODEV (fatal error)
5641 *==========================================================================*/
5642
configure_streams(const struct camera3_device * device,camera3_stream_configuration_t * stream_list)5643 int QCamera3HardwareInterface::configure_streams(
5644 const struct camera3_device *device,
5645 camera3_stream_configuration_t *stream_list)
5646 {
5647 ALOGV("%s: E", __func__);
5648 QCamera3HardwareInterface *hw =
5649 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5650 if (!hw) {
5651 ALOGE("%s: NULL camera device", __func__);
5652 return -ENODEV;
5653 }
5654 int rc = hw->configureStreams(stream_list);
5655 ALOGV("%s: X", __func__);
5656 return rc;
5657 }
5658
5659 /*===========================================================================
5660 * FUNCTION : register_stream_buffers
5661 *
5662 * DESCRIPTION: Register stream buffers with the device
5663 *
5664 * PARAMETERS :
5665 *
5666 * RETURN :
5667 *==========================================================================*/
register_stream_buffers(const struct camera3_device * device,const camera3_stream_buffer_set_t * buffer_set)5668 int QCamera3HardwareInterface::register_stream_buffers(
5669 const struct camera3_device *device,
5670 const camera3_stream_buffer_set_t *buffer_set)
5671 {
5672 ALOGV("%s: E", __func__);
5673 QCamera3HardwareInterface *hw =
5674 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5675 if (!hw) {
5676 ALOGE("%s: NULL camera device", __func__);
5677 return -ENODEV;
5678 }
5679 int rc = hw->registerStreamBuffers(buffer_set);
5680 ALOGV("%s: X", __func__);
5681 return rc;
5682 }
5683
5684 /*===========================================================================
5685 * FUNCTION : construct_default_request_settings
5686 *
5687 * DESCRIPTION: Configure a settings buffer to meet the required use case
5688 *
5689 * PARAMETERS :
5690 *
5691 *
5692 * RETURN : Success: Return valid metadata
5693 * Failure: Return NULL
5694 *==========================================================================*/
5695 const camera_metadata_t* QCamera3HardwareInterface::
construct_default_request_settings(const struct camera3_device * device,int type)5696 construct_default_request_settings(const struct camera3_device *device,
5697 int type)
5698 {
5699
5700 ALOGV("%s: E", __func__);
5701 camera_metadata_t* fwk_metadata = NULL;
5702 QCamera3HardwareInterface *hw =
5703 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5704 if (!hw) {
5705 ALOGE("%s: NULL camera device", __func__);
5706 return NULL;
5707 }
5708
5709 fwk_metadata = hw->translateCapabilityToMetadata(type);
5710
5711 ALOGV("%s: X", __func__);
5712 return fwk_metadata;
5713 }
5714
5715 /*===========================================================================
5716 * FUNCTION : process_capture_request
5717 *
5718 * DESCRIPTION:
5719 *
5720 * PARAMETERS :
5721 *
5722 *
5723 * RETURN :
5724 *==========================================================================*/
process_capture_request(const struct camera3_device * device,camera3_capture_request_t * request)5725 int QCamera3HardwareInterface::process_capture_request(
5726 const struct camera3_device *device,
5727 camera3_capture_request_t *request)
5728 {
5729 ALOGV("%s: E", __func__);
5730 QCamera3HardwareInterface *hw =
5731 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5732 if (!hw) {
5733 ALOGE("%s: NULL camera device", __func__);
5734 return -EINVAL;
5735 }
5736
5737 int rc = hw->processCaptureRequest(request);
5738 ALOGV("%s: X", __func__);
5739 return rc;
5740 }
5741
5742 /*===========================================================================
5743 * FUNCTION : dump
5744 *
5745 * DESCRIPTION:
5746 *
5747 * PARAMETERS :
5748 *
5749 *
5750 * RETURN :
5751 *==========================================================================*/
5752
dump(const struct camera3_device * device,int fd)5753 void QCamera3HardwareInterface::dump(
5754 const struct camera3_device *device, int fd)
5755 {
5756 ALOGV("%s: E", __func__);
5757 QCamera3HardwareInterface *hw =
5758 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5759 if (!hw) {
5760 ALOGE("%s: NULL camera device", __func__);
5761 return;
5762 }
5763
5764 hw->dump(fd);
5765 ALOGV("%s: X", __func__);
5766 return;
5767 }
5768
5769 /*===========================================================================
5770 * FUNCTION : flush
5771 *
5772 * DESCRIPTION:
5773 *
5774 * PARAMETERS :
5775 *
5776 *
5777 * RETURN :
5778 *==========================================================================*/
5779
flush(const struct camera3_device * device)5780 int QCamera3HardwareInterface::flush(
5781 const struct camera3_device *device)
5782 {
5783 int rc;
5784 ALOGV("%s: E", __func__);
5785 QCamera3HardwareInterface *hw =
5786 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
5787 if (!hw) {
5788 ALOGE("%s: NULL camera device", __func__);
5789 return -EINVAL;
5790 }
5791
5792 rc = hw->flush();
5793 ALOGV("%s: X", __func__);
5794 return rc;
5795 }
5796
5797 /*===========================================================================
5798 * FUNCTION : close_camera_device
5799 *
5800 * DESCRIPTION:
5801 *
5802 * PARAMETERS :
5803 *
5804 *
5805 * RETURN :
5806 *==========================================================================*/
close_camera_device(struct hw_device_t * device)5807 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
5808 {
5809 ALOGV("%s: E", __func__);
5810 int ret = NO_ERROR;
5811 QCamera3HardwareInterface *hw =
5812 reinterpret_cast<QCamera3HardwareInterface *>(
5813 reinterpret_cast<camera3_device_t *>(device)->priv);
5814 if (!hw) {
5815 ALOGE("NULL camera device");
5816 return BAD_VALUE;
5817 }
5818 delete hw;
5819
5820 pthread_mutex_lock(&mCameraSessionLock);
5821 mCameraSessionActive = 0;
5822 pthread_mutex_unlock(&mCameraSessionLock);
5823 ALOGV("%s: X", __func__);
5824 return ret;
5825 }
5826
5827 /*===========================================================================
5828 * FUNCTION : getWaveletDenoiseProcessPlate
5829 *
5830 * DESCRIPTION: query wavelet denoise process plate
5831 *
5832 * PARAMETERS : None
5833 *
 * RETURN     : WNR process plate value
5835 *==========================================================================*/
getWaveletDenoiseProcessPlate()5836 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
5837 {
5838 char prop[PROPERTY_VALUE_MAX];
5839 memset(prop, 0, sizeof(prop));
5840 property_get("persist.denoise.process.plates", prop, "0");
5841 int processPlate = atoi(prop);
5842 switch(processPlate) {
5843 case 0:
5844 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
5845 case 1:
5846 return CAM_WAVELET_DENOISE_CBCR_ONLY;
5847 case 2:
5848 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
5849 case 3:
5850 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
5851 default:
5852 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
5853 }
5854 }
5855
5856 /*===========================================================================
5857 * FUNCTION : needRotationReprocess
5858 *
5859 * DESCRIPTION: if rotation needs to be done by reprocess in pp
5860 *
5861 * PARAMETERS : none
5862 *
5863 * RETURN : true: needed
5864 * false: no need
5865 *==========================================================================*/
needRotationReprocess()5866 bool QCamera3HardwareInterface::needRotationReprocess()
5867 {
5868 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
5869 // current rotation is not zero, and pp has the capability to process rotation
5870 ALOGD("%s: need do reprocess for rotation", __func__);
5871 return true;
5872 }
5873
5874 return false;
5875 }
5876
5877 /*===========================================================================
5878 * FUNCTION : needReprocess
5879 *
5880 * DESCRIPTION: if reprocess in needed
5881 *
5882 * PARAMETERS : none
5883 *
5884 * RETURN : true: needed
5885 * false: no need
5886 *==========================================================================*/
needReprocess()5887 bool QCamera3HardwareInterface::needReprocess()
5888 {
5889 if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
5890 // TODO: add for ZSL HDR later
5891 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
5892 ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
5893 return true;
5894 }
5895 return needRotationReprocess();
5896 }
5897
5898 /*===========================================================================
5899 * FUNCTION : addOfflineReprocChannel
5900 *
5901 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
5902 * coming from input channel
5903 *
5904 * PARAMETERS :
5905 * @pInputChannel : ptr to input channel whose frames will be post-processed
5906 *
5907 * RETURN : Ptr to the newly created channel obj. NULL if failed.
5908 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
    QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle, metadata_buffer_t *metadata)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;
    if (pInputChannel == NULL) {
        ALOGE("%s: input channel obj is NULL", __func__);
        return NULL;
    }

    // The reprocess channel borrows the input channel's padding info so the
    // offline streams line up with the source buffers; picChHandle is the
    // picture channel that will consume the reprocessed frames.
    // NOTE(review): NULL-check after new is meaningful only in a
    // -fno-exceptions build (standard operator new would throw) — this is
    // the convention used throughout this HAL.
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
    if (NULL == pChannel) {
        ALOGE("%s: no mem for reprocess channel", __func__);
        return NULL;
    }

    // Channel must be initialized before streams are added below; on
    // failure the partially-built channel is released here.
    rc = pChannel->initialize();
    if (rc != NO_ERROR) {
        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config
    // Build the post-processing feature mask from the request metadata:
    // each valid metadata entry below toggles one pp feature.
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    // Edge/sharpness: enabled for any edge mode other than OFF.
    if (IS_PARM_VALID(CAM_INTF_META_EDGE_MODE, metadata)) {
        cam_edge_application_t *edge = (cam_edge_application_t *)
                POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
        if (edge->edge_mode != CAM_EDGE_MODE_OFF) {
            pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
            pp_config.sharpness = edge->sharpness;
        }
    }

    // 2D wavelet denoise: enabled for any noise-reduction mode other than
    // OFF; the process plate comes from a persist system property.
    if (IS_PARM_VALID(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata)) {
        uint8_t *noise_mode = (uint8_t *)POINTER_OF(
                CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
        if (*noise_mode != CAM_NOISE_REDUCTION_MODE_OFF) {
            pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
            pp_config.denoise2d.denoise_enable = 1;
            pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
        }
    }

    // Rotation: applied only when the pp hardware supports it (see
    // needRotationReprocess()); JPEG orientation maps to a fixed rotation.
    // NOTE(review): orientations other than 0/90/180/270 leave
    // pp_config.rotation at its zeroed default — presumably callers only
    // pass these four values; confirm against the metadata producer.
    if (IS_PARM_VALID(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
        int32_t *rotation = (int32_t *)POINTER_OF(
                CAM_INTF_META_JPEG_ORIENTATION, metadata);

        if (needRotationReprocess()) {
            pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
            if (*rotation == 0) {
                pp_config.rotation = ROTATE_0;
            } else if (*rotation == 90) {
                pp_config.rotation = ROTATE_90;
            } else if (*rotation == 180) {
                pp_config.rotation = ROTATE_180;
            } else if (*rotation == 270) {
                pp_config.rotation = ROTATE_270;
            }
        }
    }

    // Mirror the input channel's streams onto the reprocess channel with
    // the pp config assembled above; metadata flows via mMetadataChannel.
    rc = pChannel->addReprocStreamsFromSource(pp_config,
                                              pInputChannel,
                                              mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
5984
5985 }; //end namespace qcamera
5986