1 /* Copyright (c) 2012-2015, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 * * Redistributions of source code must retain the above copyright
7 * notice, this list of conditions and the following disclaimer.
8 * * Redistributions in binary form must reproduce the above
9 * copyright notice, this list of conditions and the following
10 * disclaimer in the documentation and/or other materials provided
11 * with the distribution.
12 * * Neither the name of The Linux Foundation nor the names of its
13 * contributors may be used to endorse or promote products derived
14 * from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29
30 #define ATRACE_TAG ATRACE_TAG_CAMERA
31 #define LOG_TAG "QCamera3HWI"
32 //#define LOG_NDEBUG 0
33
34 #define __STDC_LIMIT_MACROS
35 #include <cutils/properties.h>
36 #include <hardware/camera3.h>
37 #include <camera/CameraMetadata.h>
38 #include <stdio.h>
39 #include <stdlib.h>
40 #include <fcntl.h>
41 #include <stdint.h>
42 #include <utils/Log.h>
43 #include <utils/Errors.h>
44 #include <utils/Trace.h>
45 #include <sync/sync.h>
46 #include <gralloc_priv.h>
47 #include "util/QCameraFlash.h"
48 #include "QCamera3HWI.h"
49 #include "QCamera3Mem.h"
50 #include "QCamera3Channel.h"
51 #include "QCamera3PostProc.h"
52 #include "QCamera3VendorTags.h"
53
54 using namespace android;
55
56 namespace qcamera {
57
58 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
59
60 #define EMPTY_PIPELINE_DELAY 2
61 #define PARTIAL_RESULT_COUNT 3
62 #define FRAME_SKIP_DELAY 0
63 #define CAM_MAX_SYNC_LATENCY 4
64
65 #define MAX_VALUE_8BIT ((1<<8)-1)
66 #define MAX_VALUE_10BIT ((1<<10)-1)
67 #define MAX_VALUE_12BIT ((1<<12)-1)
68
69 #define VIDEO_4K_WIDTH 3840
70 #define VIDEO_4K_HEIGHT 2160
71
72 #define MAX_EIS_WIDTH 1920
73 #define MAX_EIS_HEIGHT 1080
74
75 #define MAX_RAW_STREAMS 1
76 #define MAX_STALLING_STREAMS 1
77 #define MAX_PROCESSED_STREAMS 3
78 /* Batch mode is enabled only if FPS set is equal to or greater than this */
79 #define MIN_FPS_FOR_BATCH_MODE (120)
80 #define PREVIEW_FPS_FOR_HFR (30)
81 #define DEFAULT_VIDEO_FPS (30.0)
82 #define MAX_HFR_BATCH_SIZE (8)
83 #define REGIONS_TUPLE_COUNT 5
84 #define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
85
86 #define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
87
88 #define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
89 CAM_QCOM_FEATURE_CROP |\
90 CAM_QCOM_FEATURE_ROTATION |\
91 CAM_QCOM_FEATURE_SHARPNESS |\
92 CAM_QCOM_FEATURE_SCALE |\
93 CAM_QCOM_FEATURE_CAC |\
94 CAM_QCOM_FEATURE_CDS )
95
96 #define TIMEOUT_NEVER -1
97
98 cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
99 const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
100 static pthread_mutex_t gCamLock = PTHREAD_MUTEX_INITIALIZER;
101 volatile uint32_t gCamHal3LogLevel = 1;
102
// Maps the "persist.camera.CDS" style property strings to HAL CDS mode enums.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

// Translation table: Android framework color-effect enums -> HAL effect enums.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Translation table: Android AWB mode enums -> HAL white-balance enums.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Translation table: Android scene-mode enums -> HAL scene-mode enums.
// Note STEADYPHOTO intentionally maps to the HAL's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
157
// Translation table: Android AF mode enums -> HAL focus-mode enums.
// ANDROID_CONTROL_AF_MODE_OFF appears twice on purpose: both the HAL OFF
// and FIXED focus modes are reported to the framework as AF_MODE_OFF.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Translation table: Android chromatic-aberration correction modes -> HAL.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// Translation table: Android AE antibanding modes -> HAL antibanding modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Translation table: Android AE mode enums -> HAL flash-mode enums.
// AE modes without flash (OFF/ON) both map to CAM_FLASH_MODE_OFF; the
// auto-flash and red-eye variants both map to CAM_FLASH_MODE_AUTO.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Translation table: Android flash-unit modes -> HAL flash modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Translation table: Android face-detect statistics modes -> HAL modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

// Translation table: Android focus-distance calibration quality -> HAL.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

// Translation table: Android lens movement state -> HAL AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};
233
// Supported JPEG thumbnail sizes as flat (width, height) pairs; the leading
// (0, 0) entry means "no thumbnail", as required by the HAL3 metadata spec.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};

// Translation table: Android sensor test-pattern modes -> HAL test patterns.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

// Translation table: requested high-frame-rate FPS value -> HAL HFR mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
289
// camera3_device_ops_t vtable handed to the camera framework. Entry points
// the HAL does not implement (register_stream_buffers was removed in HAL3.2;
// vendor tags are exported through a different mechanism) are left NULL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
301
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Sets every member
 *              to a safe default, fills in the camera3_device_t handed back
 *              to the framework, and reads the persist.camera.* properties
 *              that control raw dump and TNR behavior. No hardware is
 *              touched here; the backend is opened later via openCamera().
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : module-level callbacks supplied by the camera service
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mChannelHandle(0),
      mFirstRequest(false),
      mFirstConfiguration(true),
      mFlush(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mHybridAeEnable(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1)
{
    getLogLevel();
    m_perfLock.lock_init();
    // Populate the camera3_device_t the framework will use to talk to us.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;
    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        CDBG("%s: Raw dump from Camera HAL enabled", __func__);

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // Temporal noise reduction defaults to on for both preview and video,
    // overridable per-device via setprop.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "1");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    mPendingBuffersMap.num_buffers = 0;
    mPendingBuffersMap.last_frame_number = -1;
}
397
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Teardown happens in
 *              a strict order: stop every stream/channel first, then delete
 *              the channel objects, then send a final "unconfigure" to the
 *              backend, deinit parameters and close the camera. If buffers
 *              were never returned, the daemon is asked to restart and this
 *              process exits rather than leaving the backend in a bad state.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    CDBG("%s: E", __func__);
    // Snapshot this before the pending list is cleared below; it also decides
    // whether we request a daemon restart and abort at the end.
    bool hasPendingBuffers = (mPendingBuffersMap.num_buffers > 0);

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    // this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    // Metadata channel is stopped last among the streams, then the backend
    // channel bundle itself.
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
    }

    // Everything is stopped; now it is safe to delete the channel objects.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // mPictureChannel is owned via mStreamInfo and was deleted in the loop
    // above; only the pointer is cleared here.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            clear_metadata_buffer(mParameters);

            // Check if there is still pending buffer not yet returned.
            if (hasPendingBuffers) {
                for (auto& pendingBuffer : mPendingBuffersMap.mPendingBufferList) {
                    ALOGE("%s: Buffer not yet returned for stream. Frame number %d, format 0x%x, width %d, height %d",
                            __func__, pendingBuffer.frame_number, pendingBuffer.stream->format, pendingBuffer.stream->width,
                            pendingBuffer.stream->height);
                }
                ALOGE("%s: Last requested frame number is %d", __func__, mPendingBuffersMap.last_frame_number);
                // Ask the daemon to restart so leaked buffers don't poison
                // the next camera session.
                uint8_t restart = TRUE;
                ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_DAEMON_RESTART,
                        restart);
            }

            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);

            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                ALOGE("%s: set_parms failed for unconfigure", __func__);
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        ALOGE("%s: deleting channel %d", __func__, mChannelHandle);
        mChannelHandle = 0;
    }

    if (mCameraOpened)
        closeCamera();

    // Drain bookkeeping lists and free any per-request allocations.
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);

    if (hasPendingBuffers) {
        ALOGE("%s: Not all buffers were returned. Notified the camera daemon process to restart."
                " Exiting here...", __func__);
        exit(EXIT_FAILURE);
    }
    CDBG("%s: X", __func__);
}
547
548 /*===========================================================================
549 * FUNCTION : erasePendingRequest
550 *
551 * DESCRIPTION: function to erase a desired pending request after freeing any
552 * allocated memory
553 *
554 * PARAMETERS :
555 * @i : iterator pointing to pending request to be erased
556 *
557 * RETURN : iterator pointing to the next request
558 *==========================================================================*/
559 QCamera3HardwareInterface::pendingRequestIterator
erasePendingRequest(pendingRequestIterator i)560 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
561 {
562 if (i->input_buffer != NULL) {
563 free(i->input_buffer);
564 i->input_buffer = NULL;
565 }
566 if (i->settings != NULL)
567 free_camera_metadata((camera_metadata_t*)i->settings);
568 return mPendingRequestsList.erase(i);
569 }
570
/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *              from the backend daemon. A daemon death closes the backend
 *              and reports a fatal CAMERA3_MSG_ERROR_DEVICE to the
 *              framework; a pull request wakes up the request thread.
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle (unused)
 *   @evt           : ptr to event
 *   @user_data     : user data ptr (the QCamera3HardwareInterface instance)
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
                                          mm_camera_event_t *evt,
                                          void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                ALOGE("%s: Fatal, camera daemon died", __func__);
                //close the camera backend
                if (obj->mCameraHandle && obj->mCameraHandle->camera_handle
                        && obj->mCameraHandle->ops) {
                    obj->mCameraHandle->ops->error_close_camera(obj->mCameraHandle->camera_handle);
                } else {
                    ALOGE("%s: Could not close camera on error because the handle or ops is NULL",
                            __func__);
                }
                // Tell the framework the device is gone; frame_number 0 and a
                // NULL stream are required for a device-level error.
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
                notify_msg.message.error.error_stream = NULL;
                notify_msg.message.error.frame_number = 0;
                obj->mCallbackOps->notify(obj->mCallbackOps, &notify_msg);
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                // Daemon is ready for more requests; wake any blocked
                // process_capture_request under the HAL mutex.
                CDBG("%s: HAL got request pull from Daemon", __func__);
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
                        evt->server_event_type);
                break;
        }
    } else {
        ALOGE("%s: NULL user_data/evt", __func__);
    }
}
626
627 /*===========================================================================
628 * FUNCTION : openCamera
629 *
630 * DESCRIPTION: open camera
631 *
632 * PARAMETERS :
633 * @hw_device : double ptr for camera device struct
634 *
635 * RETURN : int32_t type of status
636 * NO_ERROR -- success
637 * none-zero failure code
638 *==========================================================================*/
openCamera(struct hw_device_t ** hw_device)639 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
640 {
641 int rc = 0;
642 if (mCameraOpened) {
643 *hw_device = NULL;
644 return PERMISSION_DENIED;
645 }
646 m_perfLock.lock_acq();
647 rc = openCamera();
648 if (rc == 0) {
649 *hw_device = &mCameraDevice.common;
650 } else
651 *hw_device = NULL;
652
653 m_perfLock.lock_rel();
654 return rc;
655 }
656
/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera. Bring-up order matters: reserve the flash unit
 *              first (so torch users are kicked off), then open the backend
 *              session, then register for daemon events.
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;

    ATRACE_CALL();
    if (mCameraHandle) {
        ALOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    // Take the flash unit away from any torch client before opening.
    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        ALOGE("%s: Failed to reserve flash for camera id: %d",
                __func__,
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        ALOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    mCameraOpened = true;

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        ALOGE("%s: Error, failed to register event callback", __func__);
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }
    mFirstConfiguration = true;
    return NO_ERROR;
}
705
706 /*===========================================================================
707 * FUNCTION : closeCamera
708 *
709 * DESCRIPTION: close camera
710 *
711 * PARAMETERS : none
712 *
713 * RETURN : int32_t type of status
714 * NO_ERROR -- success
715 * none-zero failure code
716 *==========================================================================*/
closeCamera()717 int QCamera3HardwareInterface::closeCamera()
718 {
719 ATRACE_CALL();
720 int rc = NO_ERROR;
721
722 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
723 mCameraHandle = NULL;
724 mCameraOpened = false;
725
726 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
727 CDBG("%s: Failed to release flash for camera id: %d",
728 __func__,
729 mCameraId);
730 }
731
732 return rc;
733 }
734
735 /*===========================================================================
736 * FUNCTION : initialize
737 *
738 * DESCRIPTION: Initialize frameworks callback functions
739 *
740 * PARAMETERS :
741 * @callback_ops : callback function to frameworks
742 *
743 * RETURN :
744 *
745 *==========================================================================*/
initialize(const struct camera3_callback_ops * callback_ops)746 int QCamera3HardwareInterface::initialize(
747 const struct camera3_callback_ops *callback_ops)
748 {
749 ATRACE_CALL();
750 int rc;
751
752 pthread_mutex_lock(&mMutex);
753
754 rc = initParameters();
755 if (rc < 0) {
756 ALOGE("%s: initParamters failed %d", __func__, rc);
757 goto err1;
758 }
759 mCallbackOps = callback_ops;
760
761 mChannelHandle = mCameraHandle->ops->add_channel(
762 mCameraHandle->camera_handle, NULL, NULL, this);
763 if (mChannelHandle == 0) {
764 ALOGE("%s: add_channel failed", __func__);
765 rc = -ENOMEM;
766 pthread_mutex_unlock(&mMutex);
767 return rc;
768 }
769
770 pthread_mutex_unlock(&mMutex);
771 mCameraInitialized = true;
772 return 0;
773
774 err1:
775 pthread_mutex_unlock(&mMutex);
776 return rc;
777 }
778
779 /*===========================================================================
780 * FUNCTION : validateStreamDimensions
781 *
782 * DESCRIPTION: Check if the configuration requested are those advertised
783 *
784 * PARAMETERS :
785 * @stream_list : streams to be configured
786 *
787 * RETURN :
788 *
789 *==========================================================================*/
validateStreamDimensions(camera3_stream_configuration_t * streamList)790 int QCamera3HardwareInterface::validateStreamDimensions(
791 camera3_stream_configuration_t *streamList)
792 {
793 int rc = NO_ERROR;
794 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
795 int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
796 size_t count = 0;
797
798 camera3_stream_t *inputStream = NULL;
799 /*
800 * Loop through all streams to find input stream if it exists*
801 */
802 for (size_t i = 0; i< streamList->num_streams; i++) {
803 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
804 if (inputStream != NULL) {
805 ALOGE("%s: Error, Multiple input streams requested");
806 return -EINVAL;
807 }
808 inputStream = streamList->streams[i];
809 }
810 }
811 /*
812 * Loop through all streams requested in configuration
813 * Check if unsupported sizes have been requested on any of them
814 */
815 for (size_t j = 0; j < streamList->num_streams; j++) {
816 bool sizeFound = false;
817 size_t jpeg_sizes_cnt = 0;
818 camera3_stream_t *newStream = streamList->streams[j];
819
820 uint32_t rotatedHeight = newStream->height;
821 uint32_t rotatedWidth = newStream->width;
822 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
823 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
824 rotatedHeight = newStream->width;
825 rotatedWidth = newStream->height;
826 }
827
828 /*
829 * Sizes are different for each type of stream format check against
830 * appropriate table.
831 */
832 switch (newStream->format) {
833 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
834 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
835 case HAL_PIXEL_FORMAT_RAW10:
836 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
837 for (size_t i = 0; i < count; i++) {
838 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
839 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
840 sizeFound = true;
841 break;
842 }
843 }
844 break;
845 case HAL_PIXEL_FORMAT_BLOB:
846 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
847 /* Generate JPEG sizes table */
848 makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
849 count,
850 MAX_SIZES_CNT,
851 available_processed_sizes);
852 jpeg_sizes_cnt = filterJpegSizes(
853 available_jpeg_sizes,
854 available_processed_sizes,
855 count * 2,
856 MAX_SIZES_CNT * 2,
857 gCamCapability[mCameraId]->active_array_size,
858 gCamCapability[mCameraId]->max_downscale_factor);
859
860 /* Verify set size against generated sizes table */
861 for (size_t i = 0; i < (jpeg_sizes_cnt / 2); i++) {
862 if (((int32_t)rotatedWidth == available_jpeg_sizes[i*2]) &&
863 ((int32_t)rotatedHeight == available_jpeg_sizes[i*2+1])) {
864 sizeFound = true;
865 break;
866 }
867 }
868 break;
869 case HAL_PIXEL_FORMAT_YCbCr_420_888:
870 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
871 default:
872 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
873 || newStream->stream_type == CAMERA3_STREAM_INPUT
874 || IS_USAGE_ZSL(newStream->usage)) {
875 if (((int32_t)rotatedWidth ==
876 gCamCapability[mCameraId]->active_array_size.width) &&
877 ((int32_t)rotatedHeight ==
878 gCamCapability[mCameraId]->active_array_size.height)) {
879 sizeFound = true;
880 break;
881 }
882 /* We could potentially break here to enforce ZSL stream
883 * set from frameworks always is full active array size
884 * but it is not clear from the spc if framework will always
885 * follow that, also we have logic to override to full array
886 * size, so keeping the logic lenient at the moment
887 */
888 }
889 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
890 MAX_SIZES_CNT);
891 for (size_t i = 0; i < count; i++) {
892 if (((int32_t)rotatedWidth ==
893 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
894 ((int32_t)rotatedHeight ==
895 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
896 sizeFound = true;
897 break;
898 }
899 }
900 break;
901 } /* End of switch(newStream->format) */
902
903 /* We error out even if a single stream has unsupported size set */
904 if (!sizeFound) {
905 ALOGE("%s: Error: Unsupported size of %d x %d requested for stream"
906 "type:%d", __func__, rotatedWidth, rotatedHeight,
907 newStream->format);
908 ALOGE("%s: Active array size is %d x %d", __func__,
909 gCamCapability[mCameraId]->active_array_size.width,
910 gCamCapability[mCameraId]->active_array_size.height);
911 rc = -EINVAL;
912 break;
913 }
914 } /* End of for each stream */
915 return rc;
916 }
917
918 /*==============================================================================
919 * FUNCTION : isSupportChannelNeeded
920 *
 * DESCRIPTION: Simple heuristic func to determine if a support channel is needed
922 *
923 * PARAMETERS :
924 * @stream_list : streams to be configured
925 * @stream_config_info : the config info for streams to be configured
926 *
 * RETURN : Boolean true/false decision
928 *
929 *==========================================================================*/
isSupportChannelNeeded(camera3_stream_configuration_t * streamList,cam_stream_size_info_t stream_config_info)930 bool QCamera3HardwareInterface::isSupportChannelNeeded(
931 camera3_stream_configuration_t *streamList,
932 cam_stream_size_info_t stream_config_info)
933 {
934 uint32_t i;
935 bool pprocRequested = false;
936 /* Check for conditions where PProc pipeline does not have any streams*/
937 for (i = 0; i < stream_config_info.num_streams; i++) {
938 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
939 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
940 pprocRequested = true;
941 break;
942 }
943 }
944
945 if (pprocRequested == false )
946 return true;
947
948 /* Dummy stream needed if only raw or jpeg streams present */
949 for (i = 0; i < streamList->num_streams; i++) {
950 switch(streamList->streams[i]->format) {
951 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
952 case HAL_PIXEL_FORMAT_RAW10:
953 case HAL_PIXEL_FORMAT_RAW16:
954 case HAL_PIXEL_FORMAT_BLOB:
955 break;
956 default:
957 return false;
958 }
959 }
960 return true;
961 }
962
963 /*==============================================================================
964 * FUNCTION : getSensorOutputSize
965 *
 * DESCRIPTION: Get sensor output size based on current stream configuration
967 *
968 * PARAMETERS :
969 * @sensor_dim : sensor output dimension (output)
970 *
971 * RETURN : int32_t type of status
972 * NO_ERROR -- success
973 * none-zero failure code
974 *
975 *==========================================================================*/
getSensorOutputSize(cam_dimension_t & sensor_dim)976 int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
977 {
978 int32_t rc = NO_ERROR;
979
980 cam_dimension_t max_dim = {0, 0};
981 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
982 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
983 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
984 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
985 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
986 }
987
988 clear_metadata_buffer(mParameters);
989
990 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
991 max_dim);
992 if (rc != NO_ERROR) {
993 ALOGE("%s:Failed to update table for CAM_INTF_PARM_MAX_DIMENSION", __func__);
994 return rc;
995 }
996
997 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
998 if (rc != NO_ERROR) {
999 ALOGE("%s: Failed to set CAM_INTF_PARM_MAX_DIMENSION", __func__);
1000 return rc;
1001 }
1002
1003 clear_metadata_buffer(mParameters);
1004 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1005
1006 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1007 mParameters);
1008 if (rc != NO_ERROR) {
1009 ALOGE("%s: Failed to get CAM_INTF_PARM_RAW_DIMENSION", __func__);
1010 return rc;
1011 }
1012
1013 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1014 ALOGI("%s: sensor output dimension = %d x %d", __func__, sensor_dim.width, sensor_dim.height);
1015
1016 return rc;
1017 }
1018
1019 /*==============================================================================
1020 * FUNCTION : enablePowerHint
1021 *
1022 * DESCRIPTION: enable single powerhint for preview and different video modes.
1023 *
1024 * PARAMETERS :
1025 *
1026 * RETURN : NULL
1027 *
1028 *==========================================================================*/
enablePowerHint()1029 void QCamera3HardwareInterface::enablePowerHint()
1030 {
1031 if (!mPowerHintEnabled) {
1032 m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 1);
1033 mPowerHintEnabled = true;
1034 }
1035 }
1036
1037 /*==============================================================================
1038 * FUNCTION : disablePowerHint
1039 *
1040 * DESCRIPTION: disable current powerhint.
1041 *
1042 * PARAMETERS :
1043 *
1044 * RETURN : NULL
1045 *
1046 *==========================================================================*/
disablePowerHint()1047 void QCamera3HardwareInterface::disablePowerHint()
1048 {
1049 if (mPowerHintEnabled) {
1050 m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 0);
1051 mPowerHintEnabled = false;
1052 }
1053 }
1054
1055 /*===========================================================================
1056 * FUNCTION : configureStreams
1057 *
1058 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1059 * and output streams.
1060 *
1061 * PARAMETERS :
1062 * @stream_list : streams to be configured
1063 *
1064 * RETURN :
1065 *
1066 *==========================================================================*/
configureStreams(camera3_stream_configuration_t * streamList)1067 int QCamera3HardwareInterface::configureStreams(
1068 camera3_stream_configuration_t *streamList)
1069 {
1070 ATRACE_CALL();
1071 int rc = 0;
1072
1073 // Acquire perfLock before configure streams
1074 m_perfLock.lock_acq();
1075 rc = configureStreamsPerfLocked(streamList);
1076 m_perfLock.lock_rel();
1077
1078 return rc;
1079 }
1080
1081 /*===========================================================================
1082 * FUNCTION : configureStreamsPerfLocked
1083 *
1084 * DESCRIPTION: configureStreams while perfLock is held.
1085 *
1086 * PARAMETERS :
1087 * @stream_list : streams to be configured
1088 *
1089 * RETURN : int32_t type of status
1090 * NO_ERROR -- success
1091 * none-zero failure code
1092 *==========================================================================*/
configureStreamsPerfLocked(camera3_stream_configuration_t * streamList)1093 int QCamera3HardwareInterface::configureStreamsPerfLocked(
1094 camera3_stream_configuration_t *streamList)
1095 {
1096 ATRACE_CALL();
1097 int rc = 0;
1098
1099 // Sanity check stream_list
1100 if (streamList == NULL) {
1101 ALOGE("%s: NULL stream configuration", __func__);
1102 return BAD_VALUE;
1103 }
1104 if (streamList->streams == NULL) {
1105 ALOGE("%s: NULL stream list", __func__);
1106 return BAD_VALUE;
1107 }
1108
1109 if (streamList->num_streams < 1) {
1110 ALOGE("%s: Bad number of streams requested: %d", __func__,
1111 streamList->num_streams);
1112 return BAD_VALUE;
1113 }
1114
1115 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1116 ALOGE("%s: Maximum number of streams %d exceeded: %d", __func__,
1117 MAX_NUM_STREAMS, streamList->num_streams);
1118 return BAD_VALUE;
1119 }
1120
1121 mOpMode = streamList->operation_mode;
1122 CDBG("%s: mOpMode: %d", __func__, mOpMode);
1123
1124 /* first invalidate all the steams in the mStreamList
1125 * if they appear again, they will be validated */
1126 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1127 it != mStreamInfo.end(); it++) {
1128 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1129 if (channel) {
1130 channel->stop();
1131 }
1132 (*it)->status = INVALID;
1133 }
1134
1135 if (mRawDumpChannel) {
1136 mRawDumpChannel->stop();
1137 delete mRawDumpChannel;
1138 mRawDumpChannel = NULL;
1139 }
1140
1141 if (mSupportChannel)
1142 mSupportChannel->stop();
1143
1144 if (mAnalysisChannel) {
1145 mAnalysisChannel->stop();
1146 }
1147 if (mMetadataChannel) {
1148 /* If content of mStreamInfo is not 0, there is metadata stream */
1149 mMetadataChannel->stop();
1150 }
1151 if (mChannelHandle) {
1152 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1153 mChannelHandle);
1154 ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
1155 }
1156
1157 pthread_mutex_lock(&mMutex);
1158
1159 /* Check whether we have video stream */
1160 m_bIs4KVideo = false;
1161 m_bIsVideo = false;
1162 m_bEisSupportedSize = false;
1163 m_bTnrEnabled = false;
1164 bool isZsl = false;
1165 uint32_t videoWidth = 0U;
1166 uint32_t videoHeight = 0U;
1167 size_t rawStreamCnt = 0;
1168 size_t stallStreamCnt = 0;
1169 size_t processedStreamCnt = 0;
1170 // Number of streams on ISP encoder path
1171 size_t numStreamsOnEncoder = 0;
1172 size_t numYuv888OnEncoder = 0;
1173 bool bYuv888OverrideJpeg = false;
1174 cam_dimension_t largeYuv888Size = {0, 0};
1175 cam_dimension_t maxViewfinderSize = {0, 0};
1176 bool bJpegExceeds4K = false;
1177 bool bJpegOnEncoder = false;
1178 bool bUseCommonFeatureMask = false;
1179 uint32_t commonFeatureMask = 0;
1180 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1181 camera3_stream_t *inputStream = NULL;
1182 bool isJpeg = false;
1183 cam_dimension_t jpegSize = {0, 0};
1184
1185 /*EIS configuration*/
1186 bool eisSupported = false;
1187 bool oisSupported = false;
1188 int32_t margin_index = -1;
1189 uint8_t eis_prop_set;
1190 uint32_t maxEisWidth = 0;
1191 uint32_t maxEisHeight = 0;
1192 int32_t hal_version = CAM_HAL_V3;
1193
1194 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1195
1196 size_t count = IS_TYPE_MAX;
1197 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1198 for (size_t i = 0; i < count; i++) {
1199 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1200 eisSupported = true;
1201 margin_index = (int32_t)i;
1202 break;
1203 }
1204 }
1205
1206 count = CAM_OPT_STAB_MAX;
1207 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1208 for (size_t i = 0; i < count; i++) {
1209 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1210 oisSupported = true;
1211 break;
1212 }
1213 }
1214
1215 if (eisSupported) {
1216 maxEisWidth = MAX_EIS_WIDTH;
1217 maxEisHeight = MAX_EIS_HEIGHT;
1218 }
1219
1220 /* EIS setprop control */
1221 char eis_prop[PROPERTY_VALUE_MAX];
1222 memset(eis_prop, 0, sizeof(eis_prop));
1223 property_get("persist.camera.eis.enable", eis_prop, "0");
1224 eis_prop_set = (uint8_t)atoi(eis_prop);
1225
1226 m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1227 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1228
1229 /* stream configurations */
1230 for (size_t i = 0; i < streamList->num_streams; i++) {
1231 camera3_stream_t *newStream = streamList->streams[i];
1232 ALOGI("%s: stream[%d] type = %d, format = %d, width = %d, "
1233 "height = %d, rotation = %d, usage = 0x%x",
1234 __func__, i, newStream->stream_type, newStream->format,
1235 newStream->width, newStream->height, newStream->rotation,
1236 newStream->usage);
1237 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1238 newStream->stream_type == CAMERA3_STREAM_INPUT){
1239 isZsl = true;
1240 }
1241 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1242 inputStream = newStream;
1243 }
1244
1245 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1246 isJpeg = true;
1247 jpegSize.width = newStream->width;
1248 jpegSize.height = newStream->height;
1249 if (newStream->width > VIDEO_4K_WIDTH ||
1250 newStream->height > VIDEO_4K_HEIGHT)
1251 bJpegExceeds4K = true;
1252 }
1253
1254 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1255 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1256 m_bIsVideo = true;
1257 videoWidth = newStream->width;
1258 videoHeight = newStream->height;
1259 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1260 (VIDEO_4K_HEIGHT <= newStream->height)) {
1261 m_bIs4KVideo = true;
1262 }
1263 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1264 (newStream->height <= maxEisHeight);
1265 }
1266 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1267 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1268 switch (newStream->format) {
1269 case HAL_PIXEL_FORMAT_BLOB:
1270 stallStreamCnt++;
1271 if (isOnEncoder(maxViewfinderSize, newStream->width,
1272 newStream->height)) {
1273 commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1274 numStreamsOnEncoder++;
1275 bJpegOnEncoder = true;
1276 }
1277 break;
1278 case HAL_PIXEL_FORMAT_RAW10:
1279 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1280 case HAL_PIXEL_FORMAT_RAW16:
1281 rawStreamCnt++;
1282 break;
1283 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1284 processedStreamCnt++;
1285 if (isOnEncoder(maxViewfinderSize, newStream->width,
1286 newStream->height)) {
1287 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1288 IS_USAGE_ZSL(newStream->usage)) {
1289 commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1290 } else {
1291 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1292 }
1293 numStreamsOnEncoder++;
1294 }
1295 break;
1296 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1297 processedStreamCnt++;
1298 if (isOnEncoder(maxViewfinderSize, newStream->width,
1299 newStream->height)) {
1300 // If Yuv888 size is not greater than 4K, set feature mask
1301 // to SUPERSET so that it support concurrent request on
1302 // YUV and JPEG.
1303 if (newStream->width <= VIDEO_4K_WIDTH &&
1304 newStream->height <= VIDEO_4K_HEIGHT) {
1305 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1306 } else {
1307 commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1308 }
1309 numStreamsOnEncoder++;
1310 numYuv888OnEncoder++;
1311 largeYuv888Size.width = newStream->width;
1312 largeYuv888Size.height = newStream->height;
1313 }
1314 break;
1315 default:
1316 processedStreamCnt++;
1317 if (isOnEncoder(maxViewfinderSize, newStream->width,
1318 newStream->height)) {
1319 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1320 numStreamsOnEncoder++;
1321 }
1322 break;
1323 }
1324
1325 }
1326 }
1327
1328 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1329 !m_bIsVideo) {
1330 m_bEisEnable = false;
1331 }
1332
1333 /* Logic to enable/disable TNR based on specific config size/etc.*/
1334 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1335 ((videoWidth == 1920 && videoHeight == 1080) ||
1336 (videoWidth == 1280 && videoHeight == 720)) &&
1337 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1338 m_bTnrEnabled = true;
1339
1340 /* Check if num_streams is sane */
1341 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1342 rawStreamCnt > MAX_RAW_STREAMS ||
1343 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1344 ALOGE("%s: Invalid stream configu: stall: %d, raw: %d, processed %d",
1345 __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
1346 pthread_mutex_unlock(&mMutex);
1347 return -EINVAL;
1348 }
1349 /* Check whether we have zsl stream or 4k video case */
1350 if (isZsl && m_bIsVideo) {
1351 ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
1352 pthread_mutex_unlock(&mMutex);
1353 return -EINVAL;
1354 }
1355 /* Check if stream sizes are sane */
1356 if (numStreamsOnEncoder > 2) {
1357 ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
1358 __func__);
1359 pthread_mutex_unlock(&mMutex);
1360 return -EINVAL;
1361 } else if (1 < numStreamsOnEncoder){
1362 bUseCommonFeatureMask = true;
1363 CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed",
1364 __func__);
1365 }
1366
1367 /* Check if BLOB size is greater than 4k in 4k recording case */
1368 if (m_bIs4KVideo && bJpegExceeds4K) {
1369 ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
1370 __func__);
1371 pthread_mutex_unlock(&mMutex);
1372 return -EINVAL;
1373 }
1374
1375 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1376 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1377 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1378 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1379 // configurations:
1380 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1381 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1382 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1383 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1384 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1385 __func__);
1386 pthread_mutex_unlock(&mMutex);
1387 return -EINVAL;
1388 }
1389
1390 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1391 // the YUV stream's size is greater or equal to the JPEG size, set common
1392 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1393 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1394 jpegSize.width, jpegSize.height) &&
1395 largeYuv888Size.width > jpegSize.width &&
1396 largeYuv888Size.height > jpegSize.height) {
1397 bYuv888OverrideJpeg = true;
1398 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1399 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1400 }
1401
1402 rc = validateStreamDimensions(streamList);
1403 if (rc == NO_ERROR) {
1404 rc = validateStreamRotations(streamList);
1405 }
1406 if (rc != NO_ERROR) {
1407 ALOGE("%s: Invalid stream configuration requested!", __func__);
1408 pthread_mutex_unlock(&mMutex);
1409 return rc;
1410 }
1411
1412 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1413 camera3_stream_t *jpegStream = NULL;
1414 for (size_t i = 0; i < streamList->num_streams; i++) {
1415 camera3_stream_t *newStream = streamList->streams[i];
1416 CDBG_HIGH("%s: newStream type = %d, stream format = %d "
1417 "stream size : %d x %d, stream rotation = %d",
1418 __func__, newStream->stream_type, newStream->format,
1419 newStream->width, newStream->height, newStream->rotation);
1420 //if the stream is in the mStreamList validate it
1421 bool stream_exists = false;
1422 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1423 it != mStreamInfo.end(); it++) {
1424 if ((*it)->stream == newStream) {
1425 QCamera3ProcessingChannel *channel =
1426 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1427 stream_exists = true;
1428 if (channel)
1429 delete channel;
1430 (*it)->status = VALID;
1431 (*it)->stream->priv = NULL;
1432 (*it)->channel = NULL;
1433 }
1434 }
1435 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1436 //new stream
1437 stream_info_t* stream_info;
1438 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1439 if (!stream_info) {
1440 ALOGE("%s: Could not allocate stream info", __func__);
1441 rc = -ENOMEM;
1442 pthread_mutex_unlock(&mMutex);
1443 return rc;
1444 }
1445 stream_info->stream = newStream;
1446 stream_info->status = VALID;
1447 stream_info->channel = NULL;
1448 mStreamInfo.push_back(stream_info);
1449 }
1450 /* Covers Opaque ZSL and API1 F/W ZSL */
1451 if (IS_USAGE_ZSL(newStream->usage)
1452 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1453 if (zslStream != NULL) {
1454 ALOGE("%s: Multiple input/reprocess streams requested!", __func__);
1455 pthread_mutex_unlock(&mMutex);
1456 return BAD_VALUE;
1457 }
1458 zslStream = newStream;
1459 }
1460 /* Covers YUV reprocess */
1461 if (inputStream != NULL) {
1462 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1463 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1464 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1465 && inputStream->width == newStream->width
1466 && inputStream->height == newStream->height) {
1467 if (zslStream != NULL) {
1468 /* This scenario indicates multiple YUV streams with same size
1469 * as input stream have been requested, since zsl stream handle
1470 * is solely use for the purpose of overriding the size of streams
1471 * which share h/w streams we will just make a guess here as to
1472 * which of the stream is a ZSL stream, this will be refactored
1473 * once we make generic logic for streams sharing encoder output
1474 */
1475 CDBG_HIGH("%s: Warning, Multiple ip/reprocess streams requested!", __func__);
1476 }
1477 zslStream = newStream;
1478 }
1479 }
1480 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1481 jpegStream = newStream;
1482 }
1483 }
1484
1485 /* If a zsl stream is set, we know that we have configured at least one input or
1486 bidirectional stream */
1487 if (NULL != zslStream) {
1488 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1489 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1490 mInputStreamInfo.format = zslStream->format;
1491 mInputStreamInfo.usage = zslStream->usage;
1492 CDBG("%s: Input stream configured! %d x %d, format %d, usage %d",
1493 __func__, mInputStreamInfo.dim.width,
1494 mInputStreamInfo.dim.height,
1495 mInputStreamInfo.format, mInputStreamInfo.usage);
1496 }
1497
1498 cleanAndSortStreamInfo();
1499 if (mMetadataChannel) {
1500 delete mMetadataChannel;
1501 mMetadataChannel = NULL;
1502 }
1503 if (mSupportChannel) {
1504 delete mSupportChannel;
1505 mSupportChannel = NULL;
1506 }
1507
1508 if (mAnalysisChannel) {
1509 delete mAnalysisChannel;
1510 mAnalysisChannel = NULL;
1511 }
1512
1513 if (mDummyBatchChannel) {
1514 delete mDummyBatchChannel;
1515 mDummyBatchChannel = NULL;
1516 }
1517
1518 //Create metadata channel and initialize it
1519 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1520 mChannelHandle, mCameraHandle->ops, captureResultCb,
1521 &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
1522 if (mMetadataChannel == NULL) {
1523 ALOGE("%s: failed to allocate metadata channel", __func__);
1524 rc = -ENOMEM;
1525 pthread_mutex_unlock(&mMutex);
1526 return rc;
1527 }
1528 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1529 if (rc < 0) {
1530 ALOGE("%s: metadata channel initialization failed", __func__);
1531 delete mMetadataChannel;
1532 mMetadataChannel = NULL;
1533 pthread_mutex_unlock(&mMutex);
1534 return rc;
1535 }
1536
1537 // Create analysis stream all the time, even when h/w support is not available
1538 {
1539 mAnalysisChannel = new QCamera3SupportChannel(
1540 mCameraHandle->camera_handle,
1541 mChannelHandle,
1542 mCameraHandle->ops,
1543 &gCamCapability[mCameraId]->padding_info,
1544 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1545 CAM_STREAM_TYPE_ANALYSIS,
1546 &gCamCapability[mCameraId]->analysis_recommended_res,
1547 gCamCapability[mCameraId]->analysis_recommended_format,
1548 this,
1549 0); // force buffer count to 0
1550 if (!mAnalysisChannel) {
1551 ALOGE("%s: H/W Analysis channel cannot be created", __func__);
1552 pthread_mutex_unlock(&mMutex);
1553 return -ENOMEM;
1554 }
1555 }
1556
1557 bool isRawStreamRequested = false;
1558 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1559 /* Allocate channel objects for the requested streams */
1560 for (size_t i = 0; i < streamList->num_streams; i++) {
1561 camera3_stream_t *newStream = streamList->streams[i];
1562 uint32_t stream_usage = newStream->usage;
1563 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1564 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1565 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1566 || IS_USAGE_ZSL(newStream->usage)) &&
1567 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1568 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1569 if (bUseCommonFeatureMask) {
1570 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1571 commonFeatureMask;
1572 } else {
1573 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1574 CAM_QCOM_FEATURE_NONE;
1575 }
1576
1577 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1578 CDBG_HIGH("%s: Input stream configured, reprocess config", __func__);
1579 } else {
1580 //for non zsl streams find out the format
1581 switch (newStream->format) {
1582 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1583 {
1584 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1585 = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1586
1587 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1588
1589 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_VIDEO;
1590 if (m_bTnrEnabled && m_bTnrVideo) {
1591 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1592 CAM_QCOM_FEATURE_CPP_TNR;
1593 }
1594
1595 } else {
1596
1597 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_PREVIEW;
1598 if (m_bTnrEnabled && m_bTnrPreview) {
1599 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1600 CAM_QCOM_FEATURE_CPP_TNR;
1601 }
1602 }
1603
1604 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1605 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1606 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1607 newStream->height;
1608 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1609 newStream->width;
1610 }
1611 }
1612 break;
1613 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1614 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1615 if (isOnEncoder(maxViewfinderSize, newStream->width,
1616 newStream->height)) {
1617 if (bUseCommonFeatureMask)
1618 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1619 commonFeatureMask;
1620 else
1621 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1622 CAM_QCOM_FEATURE_NONE;
1623 } else {
1624 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1625 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1626 }
1627 break;
1628 case HAL_PIXEL_FORMAT_BLOB:
1629 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1630 if (m_bIs4KVideo && !isZsl) {
1631 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1632 = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1633 } else {
1634 if (bUseCommonFeatureMask &&
1635 isOnEncoder(maxViewfinderSize, newStream->width,
1636 newStream->height)) {
1637 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1638 } else {
1639 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1640 }
1641 }
1642 if (isZsl) {
1643 if (zslStream) {
1644 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1645 (int32_t)zslStream->width;
1646 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1647 (int32_t)zslStream->height;
1648 } else {
1649 ALOGE("%s: Error, No ZSL stream identified",__func__);
1650 pthread_mutex_unlock(&mMutex);
1651 return -EINVAL;
1652 }
1653 } else if (m_bIs4KVideo) {
1654 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1655 (int32_t)videoWidth;
1656 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1657 (int32_t)videoHeight;
1658 } else if (bYuv888OverrideJpeg) {
1659 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1660 (int32_t)largeYuv888Size.width;
1661 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1662 (int32_t)largeYuv888Size.height;
1663 }
1664 break;
1665 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1666 case HAL_PIXEL_FORMAT_RAW16:
1667 case HAL_PIXEL_FORMAT_RAW10:
1668 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1669 isRawStreamRequested = true;
1670 break;
1671 default:
1672 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1673 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1674 break;
1675 }
1676
1677 }
1678
1679 if (newStream->priv == NULL) {
1680 //New stream, construct channel
1681 switch (newStream->stream_type) {
1682 case CAMERA3_STREAM_INPUT:
1683 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1684 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1685 break;
1686 case CAMERA3_STREAM_BIDIRECTIONAL:
1687 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1688 GRALLOC_USAGE_HW_CAMERA_WRITE;
1689 break;
1690 case CAMERA3_STREAM_OUTPUT:
1691 /* For video encoding stream, set read/write rarely
1692 * flag so that they may be set to un-cached */
1693 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1694 newStream->usage |=
1695 (GRALLOC_USAGE_SW_READ_RARELY |
1696 GRALLOC_USAGE_SW_WRITE_RARELY |
1697 GRALLOC_USAGE_HW_CAMERA_WRITE);
1698 else if (IS_USAGE_ZSL(newStream->usage))
1699 CDBG("%s: ZSL usage flag skipping", __func__);
1700 else if (newStream == zslStream
1701 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1702 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1703 } else
1704 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1705 break;
1706 default:
1707 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
1708 break;
1709 }
1710
1711 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1712 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1713 QCamera3ProcessingChannel *channel = NULL;
1714 switch (newStream->format) {
1715 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1716 if ((newStream->usage &
1717 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1718 (streamList->operation_mode ==
1719 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1720 ) {
1721 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1722 mChannelHandle, mCameraHandle->ops, captureResultCb,
1723 &gCamCapability[mCameraId]->padding_info,
1724 this,
1725 newStream,
1726 (cam_stream_type_t)
1727 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1728 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1729 mMetadataChannel,
1730 0); //heap buffers are not required for HFR video channel
1731 if (channel == NULL) {
1732 ALOGE("%s: allocation of channel failed", __func__);
1733 pthread_mutex_unlock(&mMutex);
1734 return -ENOMEM;
1735 }
1736 //channel->getNumBuffers() will return 0 here so use
1737 //MAX_INFLIGH_HFR_REQUESTS
1738 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
1739 newStream->priv = channel;
1740 ALOGI("%s: num video buffers in HFR mode: %d",
1741 __func__, MAX_INFLIGHT_HFR_REQUESTS);
1742 } else {
1743 /* Copy stream contents in HFR preview only case to create
1744 * dummy batch channel so that sensor streaming is in
1745 * HFR mode */
1746 if (!m_bIsVideo && (streamList->operation_mode ==
1747 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
1748 mDummyBatchStream = *newStream;
1749 }
1750 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1751 mChannelHandle, mCameraHandle->ops, captureResultCb,
1752 &gCamCapability[mCameraId]->padding_info,
1753 this,
1754 newStream,
1755 (cam_stream_type_t)
1756 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1757 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1758 mMetadataChannel,
1759 MAX_INFLIGHT_REQUESTS);
1760 if (channel == NULL) {
1761 ALOGE("%s: allocation of channel failed", __func__);
1762 pthread_mutex_unlock(&mMutex);
1763 return -ENOMEM;
1764 }
1765 newStream->max_buffers = channel->getNumBuffers();
1766 newStream->priv = channel;
1767 }
1768 break;
1769 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
1770 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
1771 mChannelHandle,
1772 mCameraHandle->ops, captureResultCb,
1773 &gCamCapability[mCameraId]->padding_info,
1774 this,
1775 newStream,
1776 (cam_stream_type_t)
1777 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1778 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1779 mMetadataChannel);
1780 if (channel == NULL) {
1781 ALOGE("%s: allocation of YUV channel failed", __func__);
1782 pthread_mutex_unlock(&mMutex);
1783 return -ENOMEM;
1784 }
1785 newStream->max_buffers = channel->getNumBuffers();
1786 newStream->priv = channel;
1787 break;
1788 }
1789 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1790 case HAL_PIXEL_FORMAT_RAW16:
1791 case HAL_PIXEL_FORMAT_RAW10:
1792 mRawChannel = new QCamera3RawChannel(
1793 mCameraHandle->camera_handle, mChannelHandle,
1794 mCameraHandle->ops, captureResultCb,
1795 &gCamCapability[mCameraId]->padding_info,
1796 this, newStream, CAM_QCOM_FEATURE_NONE,
1797 mMetadataChannel,
1798 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
1799 if (mRawChannel == NULL) {
1800 ALOGE("%s: allocation of raw channel failed", __func__);
1801 pthread_mutex_unlock(&mMutex);
1802 return -ENOMEM;
1803 }
1804 newStream->max_buffers = mRawChannel->getNumBuffers();
1805 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
1806 break;
1807 case HAL_PIXEL_FORMAT_BLOB:
1808 // Max live snapshot inflight buffer is 1. This is to mitigate
1809 // frame drop issues for video snapshot. The more buffers being
1810 // allocated, the more frame drops there are.
1811 mPictureChannel = new QCamera3PicChannel(
1812 mCameraHandle->camera_handle, mChannelHandle,
1813 mCameraHandle->ops, captureResultCb,
1814 &gCamCapability[mCameraId]->padding_info, this, newStream,
1815 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1816 m_bIs4KVideo, isZsl, mMetadataChannel,
1817 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
1818 if (mPictureChannel == NULL) {
1819 ALOGE("%s: allocation of channel failed", __func__);
1820 pthread_mutex_unlock(&mMutex);
1821 return -ENOMEM;
1822 }
1823 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
1824 newStream->max_buffers = mPictureChannel->getNumBuffers();
1825 mPictureChannel->overrideYuvSize(
1826 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
1827 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
1828 break;
1829
1830 default:
1831 ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
1832 break;
1833 }
1834 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
1835 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
1836 } else {
1837 ALOGE("%s: Error, Unknown stream type", __func__);
1838 return -EINVAL;
1839 }
1840
1841 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1842 it != mStreamInfo.end(); it++) {
1843 if ((*it)->stream == newStream) {
1844 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
1845 break;
1846 }
1847 }
1848 } else {
1849 // Channel already exists for this stream
1850 // Do nothing for now
1851 }
1852
1853 /* Do not add entries for input stream in metastream info
1854 * since there is no real stream associated with it
1855 */
1856 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
1857 mStreamConfigInfo.num_streams++;
1858 }
1859
1860 //RAW DUMP channel
1861 if (mEnableRawDump && isRawStreamRequested == false){
1862 cam_dimension_t rawDumpSize;
1863 rawDumpSize = getMaxRawSize(mCameraId);
1864 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
1865 mChannelHandle,
1866 mCameraHandle->ops,
1867 rawDumpSize,
1868 &gCamCapability[mCameraId]->padding_info,
1869 this, CAM_QCOM_FEATURE_NONE);
1870 if (!mRawDumpChannel) {
1871 ALOGE("%s: Raw Dump channel cannot be created", __func__);
1872 pthread_mutex_unlock(&mMutex);
1873 return -ENOMEM;
1874 }
1875 }
1876
1877
1878 if (mAnalysisChannel) {
1879 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1880 gCamCapability[mCameraId]->analysis_recommended_res;
1881 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1882 CAM_STREAM_TYPE_ANALYSIS;
1883 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1884 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1885 mStreamConfigInfo.num_streams++;
1886 }
1887
1888 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
1889 mSupportChannel = new QCamera3SupportChannel(
1890 mCameraHandle->camera_handle,
1891 mChannelHandle,
1892 mCameraHandle->ops,
1893 &gCamCapability[mCameraId]->padding_info,
1894 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1895 CAM_STREAM_TYPE_CALLBACK,
1896 &QCamera3SupportChannel::kDim,
1897 CAM_FORMAT_YUV_420_NV21,
1898 this);
1899 if (!mSupportChannel) {
1900 ALOGE("%s: dummy channel cannot be created", __func__);
1901 pthread_mutex_unlock(&mMutex);
1902 return -ENOMEM;
1903 }
1904 }
1905
1906 if (mSupportChannel) {
1907 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1908 QCamera3SupportChannel::kDim;
1909 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1910 CAM_STREAM_TYPE_CALLBACK;
1911 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1912 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1913 mStreamConfigInfo.num_streams++;
1914 }
1915
1916 if (mRawDumpChannel) {
1917 cam_dimension_t rawSize;
1918 rawSize = getMaxRawSize(mCameraId);
1919 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1920 rawSize;
1921 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1922 CAM_STREAM_TYPE_RAW;
1923 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1924 CAM_QCOM_FEATURE_NONE;
1925 mStreamConfigInfo.num_streams++;
1926 }
1927 /* In HFR mode, if video stream is not added, create a dummy channel so that
1928 * ISP can create a batch mode even for preview only case. This channel is
1929 * never 'start'ed (no stream-on), it is only 'initialized' */
1930 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1931 !m_bIsVideo) {
1932 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1933 mChannelHandle,
1934 mCameraHandle->ops, captureResultCb,
1935 &gCamCapability[mCameraId]->padding_info,
1936 this,
1937 &mDummyBatchStream,
1938 CAM_STREAM_TYPE_VIDEO,
1939 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1940 mMetadataChannel);
1941 if (NULL == mDummyBatchChannel) {
1942 ALOGE("%s: creation of mDummyBatchChannel failed."
1943 "Preview will use non-hfr sensor mode ", __func__);
1944 }
1945 }
1946 if (mDummyBatchChannel) {
1947 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1948 mDummyBatchStream.width;
1949 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1950 mDummyBatchStream.height;
1951 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1952 CAM_STREAM_TYPE_VIDEO;
1953 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1954 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1955 mStreamConfigInfo.num_streams++;
1956 }
1957
1958 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
1959 mStreamConfigInfo.buffer_info.max_buffers =
1960 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
1961
1962 /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
1963 for (pendingRequestIterator i = mPendingRequestsList.begin();
1964 i != mPendingRequestsList.end();) {
1965 i = erasePendingRequest(i);
1966 }
1967 mPendingFrameDropList.clear();
1968 // Initialize/Reset the pending buffers list
1969 mPendingBuffersMap.num_buffers = 0;
1970 mPendingBuffersMap.mPendingBufferList.clear();
1971 mPendingReprocessResultList.clear();
1972
1973 mFirstRequest = true;
1974 mCurJpegMeta.clear();
1975 //Get min frame duration for this streams configuration
1976 deriveMinFrameDuration();
1977
1978 /* Turn on video hint only if video stream is configured */
1979
1980 pthread_mutex_unlock(&mMutex);
1981
1982 return rc;
1983 }
1984
1985 /*===========================================================================
1986 * FUNCTION : validateCaptureRequest
1987 *
1988 * DESCRIPTION: validate a capture request from camera service
1989 *
1990 * PARAMETERS :
1991 * @request : request from framework to process
1992 *
1993 * RETURN :
1994 *
1995 *==========================================================================*/
validateCaptureRequest(camera3_capture_request_t * request)1996 int QCamera3HardwareInterface::validateCaptureRequest(
1997 camera3_capture_request_t *request)
1998 {
1999 ssize_t idx = 0;
2000 const camera3_stream_buffer_t *b;
2001 CameraMetadata meta;
2002
2003 /* Sanity check the request */
2004 if (request == NULL) {
2005 ALOGE("%s: NULL capture request", __func__);
2006 return BAD_VALUE;
2007 }
2008
2009 if (request->settings == NULL && mFirstRequest) {
2010 /*settings cannot be null for the first request*/
2011 return BAD_VALUE;
2012 }
2013
2014 uint32_t frameNumber = request->frame_number;
2015 if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2016 ALOGE("%s: Request %d: No output buffers provided!",
2017 __FUNCTION__, frameNumber);
2018 return BAD_VALUE;
2019 }
2020 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2021 ALOGE("%s: Number of buffers %d equals or is greater than maximum number of streams!",
2022 __func__, request->num_output_buffers, MAX_NUM_STREAMS);
2023 return BAD_VALUE;
2024 }
2025 if (request->input_buffer != NULL) {
2026 b = request->input_buffer;
2027 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2028 ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
2029 __func__, frameNumber, (long)idx);
2030 return BAD_VALUE;
2031 }
2032 if (b->release_fence != -1) {
2033 ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
2034 __func__, frameNumber, (long)idx);
2035 return BAD_VALUE;
2036 }
2037 if (b->buffer == NULL) {
2038 ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
2039 __func__, frameNumber, (long)idx);
2040 return BAD_VALUE;
2041 }
2042 }
2043
2044 // Validate all buffers
2045 b = request->output_buffers;
2046 do {
2047 QCamera3ProcessingChannel *channel =
2048 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2049 if (channel == NULL) {
2050 ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
2051 __func__, frameNumber, (long)idx);
2052 return BAD_VALUE;
2053 }
2054 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2055 ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
2056 __func__, frameNumber, (long)idx);
2057 return BAD_VALUE;
2058 }
2059 if (b->release_fence != -1) {
2060 ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
2061 __func__, frameNumber, (long)idx);
2062 return BAD_VALUE;
2063 }
2064 if (b->buffer == NULL) {
2065 ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
2066 __func__, frameNumber, (long)idx);
2067 return BAD_VALUE;
2068 }
2069 if (*(b->buffer) == NULL) {
2070 ALOGE("%s: Request %d: Buffer %ld: NULL private handle!",
2071 __func__, frameNumber, (long)idx);
2072 return BAD_VALUE;
2073 }
2074 idx++;
2075 b = request->output_buffers + idx;
2076 } while (idx < (ssize_t)request->num_output_buffers);
2077
2078 return NO_ERROR;
2079 }
2080
2081 /*===========================================================================
2082 * FUNCTION : deriveMinFrameDuration
2083 *
2084 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
2085 * on currently configured streams.
2086 *
2087 * PARAMETERS : NONE
2088 *
2089 * RETURN : NONE
2090 *
2091 *==========================================================================*/
deriveMinFrameDuration()2092 void QCamera3HardwareInterface::deriveMinFrameDuration()
2093 {
2094 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2095
2096 maxJpegDim = 0;
2097 maxProcessedDim = 0;
2098 maxRawDim = 0;
2099
2100 // Figure out maximum jpeg, processed, and raw dimensions
2101 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2102 it != mStreamInfo.end(); it++) {
2103
2104 // Input stream doesn't have valid stream_type
2105 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2106 continue;
2107
2108 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2109 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2110 if (dimension > maxJpegDim)
2111 maxJpegDim = dimension;
2112 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2113 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2114 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2115 if (dimension > maxRawDim)
2116 maxRawDim = dimension;
2117 } else {
2118 if (dimension > maxProcessedDim)
2119 maxProcessedDim = dimension;
2120 }
2121 }
2122
2123 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2124 MAX_SIZES_CNT);
2125
2126 //Assume all jpeg dimensions are in processed dimensions.
2127 if (maxJpegDim > maxProcessedDim)
2128 maxProcessedDim = maxJpegDim;
2129 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2130 if (maxProcessedDim > maxRawDim) {
2131 maxRawDim = INT32_MAX;
2132
2133 for (size_t i = 0; i < count; i++) {
2134 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2135 gCamCapability[mCameraId]->raw_dim[i].height;
2136 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2137 maxRawDim = dimension;
2138 }
2139 }
2140
2141 //Find minimum durations for processed, jpeg, and raw
2142 for (size_t i = 0; i < count; i++) {
2143 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2144 gCamCapability[mCameraId]->raw_dim[i].height) {
2145 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2146 break;
2147 }
2148 }
2149 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2150 for (size_t i = 0; i < count; i++) {
2151 if (maxProcessedDim ==
2152 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2153 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2154 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2155 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2156 break;
2157 }
2158 }
2159 }
2160
2161 /*===========================================================================
2162 * FUNCTION : getMinFrameDuration
2163 *
2164 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
2165 * and current request configuration.
2166 *
2167 * PARAMETERS : @request: requset sent by the frameworks
2168 *
2169 * RETURN : min farme duration for a particular request
2170 *
2171 *==========================================================================*/
getMinFrameDuration(const camera3_capture_request_t * request)2172 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2173 {
2174 bool hasJpegStream = false;
2175 bool hasRawStream = false;
2176 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2177 const camera3_stream_t *stream = request->output_buffers[i].stream;
2178 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2179 hasJpegStream = true;
2180 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2181 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2182 stream->format == HAL_PIXEL_FORMAT_RAW16)
2183 hasRawStream = true;
2184 }
2185
2186 if (!hasJpegStream)
2187 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2188 else
2189 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2190 }
2191
/*===========================================================================
 * FUNCTION   : handlePendingReprocResults
 *
 * DESCRIPTION: check and notify on any pending reprocess results
 *
 * PARAMETERS :
 *   @frame_number   : Pending request frame number
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // At most one stashed reprocess result can match the given frame number,
    // so both loops break after the first hit.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notify message that was held back until this
            // frame number became current.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            CDBG("%s: Delayed reprocess notify %d", __func__,
                    frame_number);

            // Find the matching entry in the pending request list to pull
            // the input buffer and settings for the final capture result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    CDBG("%s: Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!", __func__,
                            k->frame_number);

                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    // Points into the list node owned by iterator j; stays
                    // valid because erase(j) only happens after the callback
                    // below returns.
                    result.output_buffers = &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // k is invalidated by the erase; safe because we break
                    // out of the inner loop immediately.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Now that the result (which referenced j->buffer) has been
            // delivered, the stashed entry can be dropped.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2242
2243 /*===========================================================================
2244 * FUNCTION : handleBatchMetadata
2245 *
2246 * DESCRIPTION: Handles metadata buffer callback in batch mode
2247 *
2248 * PARAMETERS : @metadata_buf: metadata buffer
2249 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2250 * the meta buf in this method
2251 *
2252 * RETURN :
2253 *
2254 *==========================================================================*/
handleBatchMetadata(mm_camera_super_buf_t * metadata_buf,bool free_and_bufdone_meta_buf)2255 void QCamera3HardwareInterface::handleBatchMetadata(
2256 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2257 {
2258 ATRACE_CALL();
2259
2260 if (NULL == metadata_buf) {
2261 ALOGE("%s: metadata_buf is NULL", __func__);
2262 return;
2263 }
2264 /* In batch mode, the metdata will contain the frame number and timestamp of
2265 * the last frame in the batch. Eg: a batch containing buffers from request
2266 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
2267 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
2268 * multiple process_capture_results */
2269 metadata_buffer_t *metadata =
2270 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2271 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2272 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
2273 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
2274 uint32_t frame_number = 0, urgent_frame_number = 0;
2275 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2276 bool invalid_metadata = false;
2277 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2278 size_t loopCount = 1;
2279
2280 int32_t *p_frame_number_valid =
2281 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2282 uint32_t *p_frame_number =
2283 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2284 int64_t *p_capture_time =
2285 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2286 int32_t *p_urgent_frame_number_valid =
2287 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2288 uint32_t *p_urgent_frame_number =
2289 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2290
2291 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
2292 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
2293 (NULL == p_urgent_frame_number)) {
2294 ALOGE("%s: Invalid metadata", __func__);
2295 invalid_metadata = true;
2296 } else {
2297 frame_number_valid = *p_frame_number_valid;
2298 last_frame_number = *p_frame_number;
2299 last_frame_capture_time = *p_capture_time;
2300 urgent_frame_number_valid = *p_urgent_frame_number_valid;
2301 last_urgent_frame_number = *p_urgent_frame_number;
2302 }
2303
2304 /* In batchmode, when no video buffers are requested, set_parms are sent
2305 * for every capture_request. The difference between consecutive urgent
2306 * frame numbers and frame numbers should be used to interpolate the
2307 * corresponding frame numbers and time stamps */
2308 pthread_mutex_lock(&mMutex);
2309 if (urgent_frame_number_valid) {
2310 first_urgent_frame_number =
2311 mPendingBatchMap.valueFor(last_urgent_frame_number);
2312 urgentFrameNumDiff = last_urgent_frame_number + 1 -
2313 first_urgent_frame_number;
2314
2315 CDBG_HIGH("%s: urgent_frm: valid: %d frm_num: %d - %d",
2316 __func__, urgent_frame_number_valid,
2317 first_urgent_frame_number, last_urgent_frame_number);
2318 }
2319
2320 if (frame_number_valid) {
2321 first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
2322 frameNumDiff = last_frame_number + 1 -
2323 first_frame_number;
2324 mPendingBatchMap.removeItem(last_frame_number);
2325
2326 CDBG_HIGH("%s: frm: valid: %d frm_num: %d - %d",
2327 __func__, frame_number_valid,
2328 first_frame_number, last_frame_number);
2329
2330 }
2331 pthread_mutex_unlock(&mMutex);
2332
2333 if (urgent_frame_number_valid || frame_number_valid) {
2334 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
2335 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
2336 ALOGE("%s: urgentFrameNumDiff: %d urgentFrameNum: %d",
2337 __func__, urgentFrameNumDiff, last_urgent_frame_number);
2338 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
2339 ALOGE("%s: frameNumDiff: %d frameNum: %d",
2340 __func__, frameNumDiff, last_frame_number);
2341 }
2342
2343 for (size_t i = 0; i < loopCount; i++) {
2344 /* handleMetadataWithLock is called even for invalid_metadata for
2345 * pipeline depth calculation */
2346 if (!invalid_metadata) {
2347 /* Infer frame number. Batch metadata contains frame number of the
2348 * last frame */
2349 if (urgent_frame_number_valid) {
2350 if (i < urgentFrameNumDiff) {
2351 urgent_frame_number =
2352 first_urgent_frame_number + i;
2353 CDBG("%s: inferred urgent frame_number: %d",
2354 __func__, urgent_frame_number);
2355 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2356 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
2357 } else {
2358 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
2359 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2360 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
2361 }
2362 }
2363
2364 /* Infer frame number. Batch metadata contains frame number of the
2365 * last frame */
2366 if (frame_number_valid) {
2367 if (i < frameNumDiff) {
2368 frame_number = first_frame_number + i;
2369 CDBG("%s: inferred frame_number: %d", __func__, frame_number);
2370 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2371 CAM_INTF_META_FRAME_NUMBER, frame_number);
2372 } else {
2373 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
2374 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2375 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
2376 }
2377 }
2378
2379 if (last_frame_capture_time) {
2380 //Infer timestamp
2381 first_frame_capture_time = last_frame_capture_time -
2382 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
2383 capture_time =
2384 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
2385 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2386 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
2387 CDBG_HIGH("%s: batch capture_time: %lld, capture_time: %lld",
2388 __func__, last_frame_capture_time, capture_time);
2389 }
2390 }
2391 pthread_mutex_lock(&mMutex);
2392 handleMetadataWithLock(metadata_buf,
2393 false /* free_and_bufdone_meta_buf */,
2394 (i == 0) /* first metadata in the batch metadata */);
2395 pthread_mutex_unlock(&mMutex);
2396 }
2397
2398 done_batch_metadata:
2399 /* BufDone metadata buffer */
2400 if (free_and_bufdone_meta_buf) {
2401 mMetadataChannel->bufDone(metadata_buf);
2402 free(metadata_buf);
2403 }
2404 }
2405
2406 /*===========================================================================
2407 * FUNCTION : handleMetadataWithLock
2408 *
2409 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2410 *
2411 * PARAMETERS : @metadata_buf: metadata buffer
2412 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2413 * the meta buf in this method
2414 * @firstMetadataInBatch: Boolean to indicate whether this is the
2415 * first metadata in a batch. Valid only for batch mode
2416 *
2417 * RETURN :
2418 *
2419 *==========================================================================*/
handleMetadataWithLock(mm_camera_super_buf_t * metadata_buf,bool free_and_bufdone_meta_buf,bool firstMetadataInBatch)2420 void QCamera3HardwareInterface::handleMetadataWithLock(
2421 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
2422 bool firstMetadataInBatch)
2423 {
2424 ATRACE_CALL();
2425
2426 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2427 int32_t frame_number_valid, urgent_frame_number_valid;
2428 uint32_t frame_number, urgent_frame_number;
2429 int64_t capture_time;
2430 bool unfinished_raw_request = false;
2431
2432 int32_t *p_frame_number_valid =
2433 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2434 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2435 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2436 int32_t *p_urgent_frame_number_valid =
2437 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2438 uint32_t *p_urgent_frame_number =
2439 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2440 IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2441 metadata) {
2442 ALOGE("%s: Dropped frame info for frame_number_valid %d, frame_number %d",
2443 __func__, *p_frame_number_valid, *p_frame_number);
2444 }
2445
2446 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2447 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2448 ALOGE("%s: Invalid metadata", __func__);
2449 if (free_and_bufdone_meta_buf) {
2450 mMetadataChannel->bufDone(metadata_buf);
2451 free(metadata_buf);
2452 }
2453 goto done_metadata;
2454 } else {
2455 frame_number_valid = *p_frame_number_valid;
2456 frame_number = *p_frame_number;
2457 capture_time = *p_capture_time;
2458 urgent_frame_number_valid = *p_urgent_frame_number_valid;
2459 urgent_frame_number = *p_urgent_frame_number;
2460 }
2461 //Partial result on process_capture_result for timestamp
2462 if (urgent_frame_number_valid) {
2463 CDBG("%s: valid urgent frame_number = %u, capture_time = %lld",
2464 __func__, urgent_frame_number, capture_time);
2465
        //Received an urgent frame number, handle it
        //using partial results
2468 for (pendingRequestIterator i =
2469 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
2470 CDBG("%s: Iterator Frame = %d urgent frame = %d",
2471 __func__, i->frame_number, urgent_frame_number);
2472
2473 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
2474 (i->partial_result_cnt == 0)) {
2475 ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
2476 __func__, i->frame_number);
2477 }
2478
2479 if (i->frame_number == urgent_frame_number &&
2480 i->bUrgentReceived == 0) {
2481
2482 camera3_capture_result_t result;
2483 memset(&result, 0, sizeof(camera3_capture_result_t));
2484
2485 i->partial_result_cnt++;
2486 i->bUrgentReceived = 1;
2487 // Extract 3A metadata
2488 result.result =
2489 translateCbUrgentMetadataToResultMetadata(metadata);
2490 // Populate metadata result
2491 result.frame_number = urgent_frame_number;
2492 result.num_output_buffers = 0;
2493 result.output_buffers = NULL;
2494 result.partial_result = i->partial_result_cnt;
2495
2496 mCallbackOps->process_capture_result(mCallbackOps, &result);
2497 CDBG("%s: urgent frame_number = %u, capture_time = %lld",
2498 __func__, result.frame_number, capture_time);
2499 free_camera_metadata((camera_metadata_t *)result.result);
2500 break;
2501 }
2502 }
2503 }
2504
2505 if (!frame_number_valid) {
2506 CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
2507 if (free_and_bufdone_meta_buf) {
2508 mMetadataChannel->bufDone(metadata_buf);
2509 free(metadata_buf);
2510 }
2511 goto done_metadata;
2512 }
2513 CDBG_HIGH("%s: valid frame_number = %u, capture_time = %lld", __func__,
2514 frame_number, capture_time);
2515
2516 for (pendingRequestIterator i = mPendingRequestsList.begin();
2517 i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
2518 // Flush out all entries with less or equal frame numbers.
2519
2520 camera3_capture_result_t result;
2521 memset(&result, 0, sizeof(camera3_capture_result_t));
2522
2523 CDBG("%s: frame_number in the list is %u", __func__, i->frame_number);
2524
2525 // Check whether any stream buffer corresponding to this is dropped or not
2526 // If dropped, then send the ERROR_BUFFER for the corresponding stream
2527 // The API does not expect a blob buffer to be dropped
2528 if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
2529 /* Clear notify_msg structure */
2530 camera3_notify_msg_t notify_msg;
2531 memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t));
2532 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2533 j != i->buffers.end(); j++) {
2534 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
2535 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2536 for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
2537 if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
2538 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
2539 ALOGW("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
2540 __func__, i->frame_number, streamID, j->stream->format);
2541 notify_msg.type = CAMERA3_MSG_ERROR;
2542 notify_msg.message.error.frame_number = i->frame_number;
2543 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
2544 notify_msg.message.error.error_stream = j->stream;
2545 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
2546 ALOGW("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
2547 __func__, i->frame_number, streamID, j->stream->format);
2548 PendingFrameDropInfo PendingFrameDrop;
2549 PendingFrameDrop.frame_number=i->frame_number;
2550 PendingFrameDrop.stream_ID = streamID;
2551 // Add the Frame drop info to mPendingFrameDropList
2552 mPendingFrameDropList.push_back(PendingFrameDrop);
2553 }
2554 }
2555 }
2556 }
2557
2558 // Send empty metadata with already filled buffers for dropped metadata
2559 // and send valid metadata with already filled buffers for current metadata
2560 /* we could hit this case when we either
2561 * 1. have a pending reprocess request or
2562 * 2. miss a metadata buffer callback */
2563 if (i->frame_number < frame_number) {
2564 if (i->input_buffer) {
2565 /* this will be handled in handleInputBufferWithLock */
2566 i++;
2567 continue;
2568 } else if (i->need_dynamic_blklvl) {
2569 unfinished_raw_request = true;
2570 // i->partial_result_cnt--;
2571 CDBG("%s, frame number:%d, partial_result:%d, unfinished raw request..",
2572 __func__, i->frame_number, i->partial_result_cnt);
2573 i++;
2574 continue;
2575 } else if (i->pending_extra_result) {
2576 CDBG("%s, frame_number:%d, partial_result:%d, need_dynamic_blklvl:%d",
2577 __func__, i->frame_number, i->partial_result_cnt,
2578 i->need_dynamic_blklvl);
2579 // i->partial_result_cnt--;
2580 i++;
2581 continue;
2582 } else {
2583 ALOGE("%s: Fatal: Missing metadata buffer for frame number %d", __func__, i->frame_number);
2584 if (free_and_bufdone_meta_buf) {
2585 mMetadataChannel->bufDone(metadata_buf);
2586 free(metadata_buf);
2587 }
2588 camera3_notify_msg_t notify_msg;
2589 memset(¬ify_msg, 0, sizeof(notify_msg));
2590 notify_msg.type = CAMERA3_MSG_ERROR;
2591 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
2592 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
2593 goto done_metadata;
2594 }
2595 } else {
2596 i->partial_result_cnt++;
2597 CDBG("%s, frame_number:%d, need_dynamic_blklvl:%d, partial cnt:%d\n",
2598 __func__, i->frame_number, i->need_dynamic_blklvl,
2599 i->partial_result_cnt);
2600 if (!i->need_dynamic_blklvl) {
2601 CDBG("%s, meta for request without raw, frame number: %d\n",
2602 __func__, i->frame_number);
2603 if (!unfinished_raw_request) {
2604 i->partial_result_cnt++;
2605 CDBG("%s, no raw request pending, send the final (cnt:%d) partial result",
2606 __func__, i->partial_result_cnt);
2607 }
2608 }
2609
2610 result.partial_result = i->partial_result_cnt;
2611
2612 /* Clear notify_msg structure */
2613 camera3_notify_msg_t notify_msg;
2614 memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t));
2615
2616 // Send shutter notify to frameworks
2617 notify_msg.type = CAMERA3_MSG_SHUTTER;
2618 notify_msg.message.shutter.frame_number = i->frame_number;
2619 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
2620 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
2621
2622 i->timestamp = capture_time;
2623
2624 // Find channel requiring metadata, meaning internal offline postprocess
2625 // is needed.
2626 //TODO: for now, we don't support two streams requiring metadata at the same time.
2627 // (because we are not making copies, and metadata buffer is not reference counted.
2628 bool internalPproc = false;
2629 for (pendingBufferIterator iter = i->buffers.begin();
2630 iter != i->buffers.end(); iter++) {
2631 if (iter->need_metadata) {
2632 internalPproc = true;
2633 QCamera3ProcessingChannel *channel =
2634 (QCamera3ProcessingChannel *)iter->stream->priv;
2635 channel->queueReprocMetadata(metadata_buf);
2636 break;
2637 }
2638 }
2639
2640 result.result = translateFromHalMetadata(metadata,
2641 i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
2642 i->capture_intent, i->hybrid_ae_enable, internalPproc, i->need_dynamic_blklvl,
2643 firstMetadataInBatch);
2644
2645 saveExifParams(metadata);
2646
2647 if (i->blob_request) {
2648 {
2649 //Dump tuning metadata if enabled and available
2650 char prop[PROPERTY_VALUE_MAX];
2651 memset(prop, 0, sizeof(prop));
2652 property_get("persist.camera.dumpmetadata", prop, "0");
2653 int32_t enabled = atoi(prop);
2654 if (enabled && metadata->is_tuning_params_valid) {
2655 dumpMetadataToFile(metadata->tuning_params,
2656 mMetaFrameCount,
2657 enabled,
2658 "Snapshot",
2659 frame_number);
2660 }
2661 }
2662 }
2663
2664 if (!internalPproc) {
2665 CDBG("%s: couldn't find need_metadata for this metadata", __func__);
2666 // Return metadata buffer
2667 if (free_and_bufdone_meta_buf) {
2668 mMetadataChannel->bufDone(metadata_buf);
2669 free(metadata_buf);
2670 }
2671 }
2672 }
2673 if (!result.result) {
2674 ALOGE("%s: metadata is NULL", __func__);
2675 }
2676 result.frame_number = i->frame_number;
2677 result.input_buffer = i->input_buffer;
2678 result.num_output_buffers = 0;
2679 result.output_buffers = NULL;
2680 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2681 j != i->buffers.end(); j++) {
2682 if (j->buffer) {
2683 result.num_output_buffers++;
2684 }
2685 }
2686
2687 if (result.num_output_buffers > 0) {
2688 camera3_stream_buffer_t *result_buffers =
2689 new camera3_stream_buffer_t[result.num_output_buffers];
2690 if (!result_buffers) {
2691 ALOGE("%s: Fatal error: out of memory", __func__);
2692 }
2693 size_t result_buffers_idx = 0;
2694 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2695 j != i->buffers.end(); j++) {
2696 if (j->buffer) {
2697 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
2698 m != mPendingFrameDropList.end(); m++) {
2699 QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
2700 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2701 if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
2702 j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
2703 ALOGW("%s: Stream STATUS_ERROR frame_number=%u, streamID=%u",
2704 __func__, frame_number, streamID);
2705 m = mPendingFrameDropList.erase(m);
2706 break;
2707 }
2708 }
2709
2710 for (List<PendingBufferInfo>::iterator k =
2711 mPendingBuffersMap.mPendingBufferList.begin();
2712 k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
2713 if (k->buffer == j->buffer->buffer) {
2714 CDBG("%s: Found buffer %p in pending buffer List "
2715 "for frame %u, Take it out!!", __func__,
2716 k->buffer, k->frame_number);
2717 mPendingBuffersMap.num_buffers--;
2718 k = mPendingBuffersMap.mPendingBufferList.erase(k);
2719 break;
2720 }
2721 }
2722
2723 result_buffers[result_buffers_idx++] = *(j->buffer);
2724 free(j->buffer);
2725 j->buffer = NULL;
2726 }
2727 }
2728 result.output_buffers = result_buffers;
2729 mCallbackOps->process_capture_result(mCallbackOps, &result);
2730 CDBG("%s %d: meta frame_number = %u, capture_time = %lld, partial:%d",
2731 __func__, __LINE__, result.frame_number, i->timestamp, result.partial_result);
2732 free_camera_metadata((camera_metadata_t *)result.result);
2733 delete[] result_buffers;
2734 } else {
2735 mCallbackOps->process_capture_result(mCallbackOps, &result);
2736 CDBG("%s %d: meta frame_number = %u, capture_time = %lld, partial:%d",
2737 __func__, __LINE__, result.frame_number, i->timestamp, result.partial_result);
2738 free_camera_metadata((camera_metadata_t *)result.result);
2739 }
2740
2741 if (i->partial_result_cnt == PARTIAL_RESULT_COUNT) {
2742 mPendingLiveRequest--;
2743 i = erasePendingRequest(i);
2744 } else {
2745 CDBG("%s, keep in list, frame number:%d, partial result:%d",
2746 __func__, i->frame_number, i->partial_result_cnt);
2747 i->pending_extra_result = true;
2748 i++;
2749 }
2750
2751 if (!mPendingReprocessResultList.empty()) {
2752 handlePendingReprocResults(frame_number + 1);
2753 }
2754
2755 }
2756
2757 done_metadata:
2758 for (pendingRequestIterator i = mPendingRequestsList.begin();
2759 i != mPendingRequestsList.end() ;i++) {
2760 i->pipeline_depth++;
2761 }
2762 CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
2763 unblockRequestIfNecessary();
2764
2765 }
2766
2767 /*===========================================================================
2768 * FUNCTION : hdrPlusPerfLock
2769 *
2770 * DESCRIPTION: perf lock for HDR+ using custom intent
2771 *
2772 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
2773 *
2774 * RETURN : None
2775 *
2776 *==========================================================================*/
hdrPlusPerfLock(mm_camera_super_buf_t * metadata_buf)2777 void QCamera3HardwareInterface::hdrPlusPerfLock(
2778 mm_camera_super_buf_t *metadata_buf)
2779 {
2780 if (NULL == metadata_buf) {
2781 ALOGE("%s: metadata_buf is NULL", __func__);
2782 return;
2783 }
2784 metadata_buffer_t *metadata =
2785 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2786 int32_t *p_frame_number_valid =
2787 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2788 uint32_t *p_frame_number =
2789 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2790
2791 //acquire perf lock for 5 sec after the last HDR frame is captured
2792 if (*p_frame_number_valid) {
2793 if (mLastCustIntentFrmNum == (int32_t)*p_frame_number) {
2794 m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
2795 }
2796 }
2797
2798 //release lock after perf lock timer is expired. If lock is already released,
2799 //isTimerReset returns false
2800 if (m_perfLock.isTimerReset()) {
2801 mLastCustIntentFrmNum = -1;
2802 m_perfLock.lock_rel_timed();
2803 }
2804 }
2805
2806 /*===========================================================================
2807 * FUNCTION : handleInputBufferWithLock
2808 *
2809 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
2810 *
2811 * PARAMETERS : @frame_number: frame number of the input buffer
2812 *
2813 * RETURN :
2814 *
2815 *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CALL();
    // Locate the pending request that owns this input buffer's frame number.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        // Send the shutter notification exactly once per request. The
        // timestamp is taken from ANDROID_SENSOR_TIMESTAMP in the input
        // settings when present (reprocess preserves the original capture
        // time); otherwise fall back to the current monotonic clock.
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    ALOGE("%s: No timestamp in input settings! Using current one.",
                            __func__);
                }
            } else {
                ALOGE("%s: Input settings missing!", __func__);
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->shutter_notified = true;
            CDBG("%s: Input request metadata notify frame_number = %u, capture_time = %llu",
                    __func__, i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait for the input buffer's release fence (and close the fd)
        // before handing the result back to the framework.
        if (i->input_buffer->release_fence != -1) {
           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
           close(i->input_buffer->release_fence);
           if (rc != OK) {
               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
           }
        }

        // Return the request's settings and the consumed input buffer as a
        // final capture result (all partial results complete).
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        mCallbackOps->process_capture_result(mCallbackOps, &result);
        CDBG("%s: Input request metadata and input buffer frame_number = %u",
                __func__, i->frame_number);
        i = erasePendingRequest(i);
    } else {
        ALOGE("%s: Could not find input request for frame number %d", __func__, frame_number);
    }
}
2874
getBlackLevelRegion(int (& opticalBlackRegions)[4])2875 bool QCamera3HardwareInterface::getBlackLevelRegion(int (&opticalBlackRegions)[4])
2876 {
2877 if (gCamCapability[mCameraId]->optical_black_region_count > 0) {
2878 /*just calculate one region black level and send to fwk*/
2879 for (size_t i = 0; i < 4; i++) {
2880 opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
2881 }
2882 return TRUE;
2883 }
2884
2885 return FALSE;
2886 }
2887
// Thread-safe wrapper: acquires mMutex and forwards to the WithLock variant,
// which publishes the dynamic black level metadata for the given frame.
void QCamera3HardwareInterface::sendDynamicBlackLevel(float blacklevel[4], uint32_t frame_number)
{
    CDBG("%s, E.\n", __func__);
    pthread_mutex_lock(&mMutex);
    sendDynamicBlackLevelWithLock(blacklevel, frame_number);
    pthread_mutex_unlock(&mMutex);
    CDBG("%s, X.\n", __func__);
}
2896
// Deliver the per-frame dynamic black level for a RAW request as an extra
// partial result (ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL), then flush any
// subsequent requests that were only waiting on this one to preserve
// in-order result delivery. Caller must hold mMutex.
void QCamera3HardwareInterface::sendDynamicBlackLevelWithLock(float blacklevel[4], uint32_t frame_number)
{
    CDBG("%s, E. frame_number:%d\n", __func__, frame_number);

    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    // Only requests flagged need_dynamic_blklvl expect this callback.
    if ((i == mPendingRequestsList.end()) || !i->need_dynamic_blklvl) {
        ALOGE("%s, error: invalid frame number.", __func__);
        return;
    }

    i->partial_result_cnt++;

    CameraMetadata camMetadata;
    int64_t fwk_frame_number = (int64_t)frame_number;
    camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);

    // update dynamic black level here
    camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, blacklevel, 4);

    // Metadata-only partial result: no output buffers are attached.
    camera3_capture_result_t result;
    memset(&result, 0, sizeof(camera3_capture_result_t));
    result.frame_number = frame_number;
    result.num_output_buffers = 0;
    result.result = camMetadata.release();
    result.partial_result = i->partial_result_cnt;

    CDBG("%s, partial result:%d, frame_number:%d, pending extra result:%d\n",
            __func__, result.partial_result, frame_number, i->pending_extra_result);
    mCallbackOps->process_capture_result(mCallbackOps, &result);
    free_camera_metadata((camera_metadata_t *)result.result);

    if (i->partial_result_cnt == PARTIAL_RESULT_COUNT) {
        CDBG("%s, remove cur request from pending list.", __func__);
        mPendingLiveRequest--;
        i = erasePendingRequest(i);

        // traverse the remaining pending list to see whether need to send cached ones..
        while (i != mPendingRequestsList.end()) {
            CDBG("%s, frame number:%d, partial_result:%d, pending extra result:%d",
                    __func__, i->frame_number, i->partial_result_cnt,
                    i->pending_extra_result);

            // A later non-RAW request that only lacked its final (empty)
            // metadata can now be completed in order; stop at the first
            // request that isn't in that state.
            if ((i->partial_result_cnt == PARTIAL_RESULT_COUNT - 1)
                    && (i->need_dynamic_blklvl == false) /* in case two consecutive raw requests */) {
                // send out final result, and remove it from pending list.
                CameraMetadata camMetadata;
                int64_t fwk_frame_number = (int64_t)i->frame_number;
                camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);

                memset(&result, 0, sizeof(camera3_capture_result_t));
                result.frame_number = i->frame_number;
                result.num_output_buffers = 0;
                result.result = camMetadata.release();
                result.partial_result = i->partial_result_cnt + 1;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                free_camera_metadata((camera_metadata_t *)result.result);

                mPendingLiveRequest--;
                i = erasePendingRequest(i);
                CDBG("%s, mPendingLiveRequest:%d, pending list size:%d",
                        __func__, mPendingLiveRequest, mPendingRequestsList.size());
            } else {
                break;
            }
        }
    }

    unblockRequestIfNecessary();
    CDBG("%s, X.mPendingLiveRequest = %d\n", __func__, mPendingLiveRequest);
}
2971
2972
2973 /*===========================================================================
2974 * FUNCTION : handleBufferWithLock
2975 *
2976 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
2977 *
2978 * PARAMETERS : @buffer: image buffer for the callback
2979 * @frame_number: frame number of the image buffer
2980 *
2981 * RETURN :
2982 *
2983 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
        camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end() || i->pending_extra_result == true) {
        if (i != mPendingRequestsList.end()) {
            // though the pendingRequestInfo is still in the list,
            // still send the buffer directly, as the pending_extra_result is true,
            // and we've already received meta for this frame number.
            CDBG("%s, send the buffer directly, frame number:%d",
                    __func__, i->frame_number);
        }
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                ALOGE("%s: Error: pending live frame number %d is smaller than %d",
                        __func__, j->frame_number, frame_number);
            }
        }
        // Buffer-only result: metadata for this frame was already delivered,
        // so result.result stays NULL and partial_result stays 0.
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this (stream, frame) pair was flagged as dropped, mark the
        // buffer STATUS_ERROR before returning it and clear the drop entry.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        CDBG_HIGH("%s: result frame_number = %d, buffer = %p",
                __func__, frame_number, buffer->buffer);

        // Book-keeping: this buffer is no longer owned by the HAL.
        for (List<PendingBufferInfo>::iterator k =
                mPendingBuffersMap.mPendingBufferList.begin();
                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer->buffer) {
                CDBG("%s: Found Frame buffer, take it out from list",
                        __func__);

                mPendingBuffersMap.num_buffers--;
                k = mPendingBuffersMap.mPendingBufferList.erase(k);
                break;
            }
        }
        CDBG("%s: mPendingBuffersMap.num_buffers = %d",
                __func__, mPendingBuffersMap.num_buffers);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: build the shutter notification using the
            // original capture timestamp from the input settings when
            // available, otherwise the current monotonic time.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    ALOGE("%s: No timestamp in input settings! Using current one.",
                            __func__);
                }
            } else {
                ALOGE("%s: Input settings missing!", __func__);
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Wait for the input buffer's release fence (and close the fd)
            // before completing the request.
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
                   ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
               }
            }

            // The output buffer leaves HAL ownership here as well.
            for (List<PendingBufferInfo>::iterator k =
                    mPendingBuffersMap.mPendingBufferList.begin();
                    k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
                if (k->buffer == buffer->buffer) {
                    CDBG("%s: Found Frame buffer, take it out from list",
                            __func__);

                    mPendingBuffersMap.num_buffers--;
                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
                    break;
                }
            }
            CDBG("%s: mPendingBuffersMap.num_buffers = %d",
                    __func__, mPendingBuffersMap.num_buffers);

            // Results must go to the framework in frame-number order: only
            // notify immediately if no older request is still pending.
            bool notifyNow = true;
            for (pendingRequestIterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    notifyNow = false;
                    break;
                }
            }

            if (notifyNow) {
                camera3_capture_result result;
                memset(&result, 0, sizeof(camera3_capture_result));
                result.frame_number = frame_number;
                result.result = i->settings;
                result.input_buffer = i->input_buffer;
                result.num_output_buffers = 1;
                result.output_buffers = buffer;
                result.partial_result = PARTIAL_RESULT_COUNT;

                mCallbackOps->notify(mCallbackOps, &notify_msg);
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
                i = erasePendingRequest(i);
            } else {
                // Cache reprocess result for later
                // (drained once the older frames have completed).
                PendingReprocessResult pendingResult;
                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
                pendingResult.notify_msg = notify_msg;
                pendingResult.buffer = *buffer;
                pendingResult.frame_number = frame_number;
                mPendingReprocessResultList.push_back(pendingResult);
                CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
            }
        } else {
            // Metadata not received yet: cache a heap copy of the stream
            // buffer in the request's buffer list; it is sent together with
            // the metadata later (the copy is freed at that point).
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                                sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        CDBG_HIGH("%s: cache buffer %p at result frame_number %d",
                                __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
}
3145
3146 /*===========================================================================
3147 * FUNCTION : unblockRequestIfNecessary
3148 *
3149 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3150 * that mMutex is held when this function is called.
3151 *
3152 * PARAMETERS :
3153 *
3154 * RETURN :
3155 *
3156 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
    // Unblock process_capture_request
    // Signal (not broadcast): only the request thread waits on mRequestCond.
    // mMutex must already be held by the caller, per the contract above.
    pthread_cond_signal(&mRequestCond);
}
3162
3163
3164 /*===========================================================================
3165 * FUNCTION : processCaptureRequest
3166 *
3167 * DESCRIPTION: process a capture request from camera service
3168 *
3169 * PARAMETERS :
3170 * @request : request from framework to process
3171 *
3172 * RETURN :
3173 *
3174 *==========================================================================*/
processCaptureRequest(camera3_capture_request_t * request)3175 int QCamera3HardwareInterface::processCaptureRequest(
3176 camera3_capture_request_t *request)
3177 {
3178 ATRACE_CALL();
3179 int rc = NO_ERROR;
3180 int32_t request_id;
3181 CameraMetadata meta;
3182 uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
3183 uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3184 bool isVidBufRequested = false;
3185 camera3_stream_buffer_t *pInputBuffer = NULL;
3186
3187 pthread_mutex_lock(&mMutex);
3188
3189 rc = validateCaptureRequest(request);
3190 if (rc != NO_ERROR) {
3191 ALOGE("%s: incoming request is not valid", __func__);
3192 pthread_mutex_unlock(&mMutex);
3193 return rc;
3194 }
3195
3196 meta = request->settings;
3197
3198 // For first capture request, send capture intent, and
3199 // stream on all streams
3200 if (mFirstRequest) {
3201 // send an unconfigure to the backend so that the isp
3202 // resources are deallocated
3203 if (!mFirstConfiguration) {
3204 cam_stream_size_info_t stream_config_info;
3205 int32_t hal_version = CAM_HAL_V3;
3206 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3207 stream_config_info.buffer_info.min_buffers =
3208 MIN_INFLIGHT_REQUESTS;
3209 stream_config_info.buffer_info.max_buffers =
3210 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3211 clear_metadata_buffer(mParameters);
3212 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3213 CAM_INTF_PARM_HAL_VERSION, hal_version);
3214 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3215 CAM_INTF_META_STREAM_INFO, stream_config_info);
3216 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3217 mParameters);
3218 if (rc < 0) {
3219 ALOGE("%s: set_parms for unconfigure failed", __func__);
3220 pthread_mutex_unlock(&mMutex);
3221 return rc;
3222 }
3223 }
3224 m_perfLock.lock_acq();
3225 /* get eis information for stream configuration */
3226 cam_is_type_t is_type;
3227 char is_type_value[PROPERTY_VALUE_MAX];
3228 property_get("persist.camera.is_type", is_type_value, "0");
3229 is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3230
3231 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3232 int32_t hal_version = CAM_HAL_V3;
3233 uint8_t captureIntent =
3234 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3235 mCaptureIntent = captureIntent;
3236 clear_metadata_buffer(mParameters);
3237 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3238 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3239 }
3240
3241 //If EIS is enabled, turn it on for video
3242 bool setEis = m_bEisEnable && m_bEisSupportedSize;
3243 int32_t vsMode;
3244 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3245 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3246 rc = BAD_VALUE;
3247 }
3248
3249 //IS type will be 0 unless EIS is supported. If EIS is supported
3250 //it could either be 1 or 4 depending on the stream and video size
3251 if (setEis) {
3252 if (!m_bEisSupportedSize) {
3253 is_type = IS_TYPE_DIS;
3254 } else {
3255 is_type = IS_TYPE_EIS_2_0;
3256 }
3257 mStreamConfigInfo.is_type = is_type;
3258 } else {
3259 mStreamConfigInfo.is_type = IS_TYPE_NONE;
3260 }
3261
3262 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3263 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3264 int32_t tintless_value = 1;
3265 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3266 CAM_INTF_PARM_TINTLESS, tintless_value);
3267 //Disable CDS for HFR mode and if mPprocBypass = true.
3268 //CDS is a session parameter in the backend/ISP, so need to be set/reset
3269 //after every configure_stream
3270 if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3271 (m_bIsVideo)) {
3272 int32_t cds = CAM_CDS_MODE_OFF;
3273 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3274 CAM_INTF_PARM_CDS_MODE, cds))
3275 ALOGE("%s: Failed to disable CDS for HFR mode", __func__);
3276
3277 }
3278 setMobicat();
3279
3280 /* Set fps and hfr mode while sending meta stream info so that sensor
3281 * can configure appropriate streaming mode */
3282 mHFRVideoFps = DEFAULT_VIDEO_FPS;
3283 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3284 rc = setHalFpsRange(meta, mParameters);
3285 if (rc != NO_ERROR) {
3286 ALOGE("%s: setHalFpsRange failed", __func__);
3287 }
3288 }
3289 if (meta.exists(ANDROID_CONTROL_MODE)) {
3290 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3291 rc = extractSceneMode(meta, metaMode, mParameters);
3292 if (rc != NO_ERROR) {
3293 ALOGE("%s: extractSceneMode failed", __func__);
3294 }
3295 }
3296
3297 //TODO: validate the arguments, HSV scenemode should have only the
3298 //advertised fps ranges
3299
3300 /*set the capture intent, hal version, tintless, stream info,
3301 *and disenable parameters to the backend*/
3302 CDBG("%s: set_parms META_STREAM_INFO ", __func__ );
3303 mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3304 mParameters);
3305
3306 cam_dimension_t sensor_dim;
3307 memset(&sensor_dim, 0, sizeof(sensor_dim));
3308 rc = getSensorOutputSize(sensor_dim);
3309 if (rc != NO_ERROR) {
3310 ALOGE("%s: Failed to get sensor output size", __func__);
3311 pthread_mutex_unlock(&mMutex);
3312 goto error_exit;
3313 }
3314
3315 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3316 gCamCapability[mCameraId]->active_array_size.height,
3317 sensor_dim.width, sensor_dim.height);
3318
3319 /* Set batchmode before initializing channel. Since registerBuffer
3320 * internally initializes some of the channels, better set batchmode
3321 * even before first register buffer */
3322 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3323 it != mStreamInfo.end(); it++) {
3324 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3325 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3326 && mBatchSize) {
3327 rc = channel->setBatchSize(mBatchSize);
3328 //Disable per frame map unmap for HFR/batchmode case
3329 rc |= channel->setPerFrameMapUnmap(false);
3330 if (NO_ERROR != rc) {
3331 ALOGE("%s : Channel init failed %d", __func__, rc);
3332 pthread_mutex_unlock(&mMutex);
3333 goto error_exit;
3334 }
3335 }
3336 }
3337
3338 //First initialize all streams
3339 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3340 it != mStreamInfo.end(); it++) {
3341 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3342 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3343 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3344 setEis)
3345 rc = channel->initialize(is_type);
3346 else {
3347 rc = channel->initialize(IS_TYPE_NONE);
3348 }
3349 if (NO_ERROR != rc) {
3350 ALOGE("%s : Channel initialization failed %d", __func__, rc);
3351 pthread_mutex_unlock(&mMutex);
3352 goto error_exit;
3353 }
3354 }
3355
3356 if (mRawDumpChannel) {
3357 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3358 if (rc != NO_ERROR) {
3359 ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
3360 pthread_mutex_unlock(&mMutex);
3361 goto error_exit;
3362 }
3363 }
3364 if (mSupportChannel) {
3365 rc = mSupportChannel->initialize(IS_TYPE_NONE);
3366 if (rc < 0) {
3367 ALOGE("%s: Support channel initialization failed", __func__);
3368 pthread_mutex_unlock(&mMutex);
3369 goto error_exit;
3370 }
3371 }
3372 if (mAnalysisChannel) {
3373 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3374 if (rc < 0) {
3375 ALOGE("%s: Analysis channel initialization failed", __func__);
3376 pthread_mutex_unlock(&mMutex);
3377 goto error_exit;
3378 }
3379 }
3380 if (mDummyBatchChannel) {
3381 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3382 if (rc < 0) {
3383 ALOGE("%s: mDummyBatchChannel setBatchSize failed", __func__);
3384 pthread_mutex_unlock(&mMutex);
3385 goto error_exit;
3386 }
3387 rc = mDummyBatchChannel->initialize(is_type);
3388 if (rc < 0) {
3389 ALOGE("%s: mDummyBatchChannel initialization failed", __func__);
3390 pthread_mutex_unlock(&mMutex);
3391 goto error_exit;
3392 }
3393 }
3394
3395 // Set bundle info
3396 rc = setBundleInfo();
3397 if (rc < 0) {
3398 ALOGE("%s: setBundleInfo failed %d", __func__, rc);
3399 pthread_mutex_unlock(&mMutex);
3400 goto error_exit;
3401 }
3402
3403 //Then start them.
3404 CDBG_HIGH("%s: Start META Channel", __func__);
3405 rc = mMetadataChannel->start();
3406 if (rc < 0) {
3407 ALOGE("%s: META channel start failed", __func__);
3408 pthread_mutex_unlock(&mMutex);
3409 goto error_exit;
3410 }
3411
3412 if (mAnalysisChannel) {
3413 rc = mAnalysisChannel->start();
3414 if (rc < 0) {
3415 ALOGE("%s: Analysis channel start failed", __func__);
3416 mMetadataChannel->stop();
3417 pthread_mutex_unlock(&mMutex);
3418 goto error_exit;
3419 }
3420 }
3421
3422 if (mSupportChannel) {
3423 rc = mSupportChannel->start();
3424 if (rc < 0) {
3425 ALOGE("%s: Support channel start failed", __func__);
3426 mMetadataChannel->stop();
3427 /* Although support and analysis are mutually exclusive today
3428 adding it in anycase for future proofing */
3429 if (mAnalysisChannel) {
3430 mAnalysisChannel->stop();
3431 }
3432 pthread_mutex_unlock(&mMutex);
3433 goto error_exit;
3434 }
3435 }
3436 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3437 it != mStreamInfo.end(); it++) {
3438 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3439 CDBG_HIGH("%s: Start Processing Channel mask=%d",
3440 __func__, channel->getStreamTypeMask());
3441 rc = channel->start();
3442 if (rc < 0) {
3443 ALOGE("%s: channel start failed", __func__);
3444 pthread_mutex_unlock(&mMutex);
3445 goto error_exit;
3446 }
3447 }
3448
3449 if (mRawDumpChannel) {
3450 CDBG("%s: Starting raw dump stream",__func__);
3451 rc = mRawDumpChannel->start();
3452 if (rc != NO_ERROR) {
3453 ALOGE("%s: Error Starting Raw Dump Channel", __func__);
3454 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3455 it != mStreamInfo.end(); it++) {
3456 QCamera3Channel *channel =
3457 (QCamera3Channel *)(*it)->stream->priv;
3458 ALOGE("%s: Stopping Processing Channel mask=%d", __func__,
3459 channel->getStreamTypeMask());
3460 channel->stop();
3461 }
3462 if (mSupportChannel)
3463 mSupportChannel->stop();
3464 if (mAnalysisChannel) {
3465 mAnalysisChannel->stop();
3466 }
3467 mMetadataChannel->stop();
3468 pthread_mutex_unlock(&mMutex);
3469 goto error_exit;
3470 }
3471 }
3472
3473 if (mChannelHandle) {
3474
3475 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3476 mChannelHandle);
3477 if (rc != NO_ERROR) {
3478 ALOGE("%s: start_channel failed %d", __func__, rc);
3479 pthread_mutex_unlock(&mMutex);
3480 goto error_exit;
3481 }
3482 }
3483
3484
3485 goto no_error;
3486 error_exit:
3487 m_perfLock.lock_rel();
3488 return rc;
3489 no_error:
3490 m_perfLock.lock_rel();
3491
3492 mWokenUpByDaemon = false;
3493 mPendingLiveRequest = 0;
3494 mFirstConfiguration = false;
3495 enablePowerHint();
3496 }
3497
3498 uint32_t frameNumber = request->frame_number;
3499 cam_stream_ID_t streamID;
3500
3501 if (meta.exists(ANDROID_REQUEST_ID)) {
3502 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3503 mCurrentRequestId = request_id;
3504 CDBG("%s: Received request with id: %d",__func__, request_id);
3505 } else if (mFirstRequest || mCurrentRequestId == -1){
3506 ALOGE("%s: Unable to find request id field, \
3507 & no previous id available", __func__);
3508 pthread_mutex_unlock(&mMutex);
3509 return NAME_NOT_FOUND;
3510 } else {
3511 CDBG("%s: Re-using old request id", __func__);
3512 request_id = mCurrentRequestId;
3513 }
3514
3515 CDBG_HIGH("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
3516 __func__, __LINE__,
3517 request->num_output_buffers,
3518 request->input_buffer,
3519 frameNumber);
3520 // Acquire all request buffers first
3521 streamID.num_streams = 0;
3522 int blob_request = 0;
3523 uint32_t snapshotStreamId = 0;
3524 for (size_t i = 0; i < request->num_output_buffers; i++) {
3525 const camera3_stream_buffer_t& output = request->output_buffers[i];
3526 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3527
3528 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3529 //Call function to store local copy of jpeg data for encode params.
3530 blob_request = 1;
3531 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3532 }
3533
3534 if (output.acquire_fence != -1) {
3535 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3536 close(output.acquire_fence);
3537 if (rc != OK) {
3538 ALOGE("%s: sync wait failed %d", __func__, rc);
3539 pthread_mutex_unlock(&mMutex);
3540 return rc;
3541 }
3542 }
3543
3544 streamID.streamID[streamID.num_streams] =
3545 channel->getStreamID(channel->getStreamTypeMask());
3546 streamID.num_streams++;
3547
3548 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3549 isVidBufRequested = true;
3550 }
3551 }
3552
3553 if (blob_request && mRawDumpChannel) {
3554 CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
3555 streamID.streamID[streamID.num_streams] =
3556 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3557 streamID.num_streams++;
3558 }
3559
3560 if(request->input_buffer == NULL) {
3561 /* Parse the settings:
3562 * - For every request in NORMAL MODE
3563 * - For every request in HFR mode during preview only case
3564 * - For first request of every batch in HFR mode during video
3565 * recording. In batchmode the same settings except frame number is
3566 * repeated in each request of the batch.
3567 */
3568 if (!mBatchSize ||
3569 (mBatchSize && !isVidBufRequested) ||
3570 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3571 rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
3572 if (rc < 0) {
3573 ALOGE("%s: fail to set frame parameters", __func__);
3574 pthread_mutex_unlock(&mMutex);
3575 return rc;
3576 }
3577 }
3578 /* For batchMode HFR, setFrameParameters is not called for every
3579 * request. But only frame number of the latest request is parsed.
3580 * Keep track of first and last frame numbers in a batch so that
3581 * metadata for the frame numbers of batch can be duplicated in
3582 * handleBatchMetadta */
3583 if (mBatchSize) {
3584 if (!mToBeQueuedVidBufs) {
3585 //start of the batch
3586 mFirstFrameNumberInBatch = request->frame_number;
3587 }
3588 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3589 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3590 ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3591 return BAD_VALUE;
3592 }
3593 }
3594 if (mNeedSensorRestart) {
3595 /* Unlock the mutex as restartSensor waits on the channels to be
3596 * stopped, which in turn calls stream callback functions -
3597 * handleBufferWithLock and handleMetadataWithLock */
3598 pthread_mutex_unlock(&mMutex);
3599 rc = dynamicUpdateMetaStreamInfo();
3600 if (rc != NO_ERROR) {
3601 ALOGE("%s: Restarting the sensor failed", __func__);
3602 return BAD_VALUE;
3603 }
3604 mNeedSensorRestart = false;
3605 pthread_mutex_lock(&mMutex);
3606 }
3607 } else {
3608
3609 if (request->input_buffer->acquire_fence != -1) {
3610 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3611 close(request->input_buffer->acquire_fence);
3612 if (rc != OK) {
3613 ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
3614 pthread_mutex_unlock(&mMutex);
3615 return rc;
3616 }
3617 }
3618 }
3619
3620 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
3621 mLastCustIntentFrmNum = frameNumber;
3622 }
3623 /* Update pending request list and pending buffers map */
3624 PendingRequestInfo pendingRequest;
3625 pendingRequestIterator latestRequest;
3626 pendingRequest.frame_number = frameNumber;
3627 pendingRequest.num_buffers = request->num_output_buffers;
3628 pendingRequest.request_id = request_id;
3629 pendingRequest.blob_request = blob_request;
3630 pendingRequest.timestamp = 0;
3631 pendingRequest.bUrgentReceived = 0;
3632 if (request->input_buffer) {
3633 pendingRequest.input_buffer =
3634 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
3635 *(pendingRequest.input_buffer) = *(request->input_buffer);
3636 pInputBuffer = pendingRequest.input_buffer;
3637 } else {
3638 pendingRequest.input_buffer = NULL;
3639 pInputBuffer = NULL;
3640 }
3641
3642 pendingRequest.pipeline_depth = 0;
3643 pendingRequest.partial_result_cnt = 0;
3644 extractJpegMetadata(mCurJpegMeta, request);
3645 pendingRequest.jpegMetadata = mCurJpegMeta;
3646 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
3647 pendingRequest.shutter_notified = false;
3648 pendingRequest.need_dynamic_blklvl = false;
3649 pendingRequest.pending_extra_result = false;
3650
3651 //extract capture intent
3652 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3653 mCaptureIntent =
3654 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3655 }
3656 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
3657 mHybridAeEnable =
3658 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
3659 }
3660 pendingRequest.capture_intent = mCaptureIntent;
3661 pendingRequest.hybrid_ae_enable = mHybridAeEnable;
3662
3663 for (size_t i = 0; i < request->num_output_buffers; i++) {
3664 RequestedBufferInfo requestedBuf;
3665 memset(&requestedBuf, 0, sizeof(requestedBuf));
3666 requestedBuf.stream = request->output_buffers[i].stream;
3667 requestedBuf.buffer = NULL;
3668 pendingRequest.buffers.push_back(requestedBuf);
3669
3670 // Add to buffer handle the pending buffers list
3671 PendingBufferInfo bufferInfo;
3672 bufferInfo.frame_number = frameNumber;
3673 bufferInfo.buffer = request->output_buffers[i].buffer;
3674 bufferInfo.stream = request->output_buffers[i].stream;
3675 mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
3676 mPendingBuffersMap.num_buffers++;
3677 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
3678 CDBG("%s: frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
3679 __func__, frameNumber, bufferInfo.buffer,
3680 channel->getStreamTypeMask(), bufferInfo.stream->format);
3681
3682 if (bufferInfo.stream->format == HAL_PIXEL_FORMAT_RAW16) {
3683 if (gCamCapability[mCameraId]->optical_black_region_count > 0) {
3684 CDBG("%s, frame_number:%d, need dynamic blacklevel", __func__, frameNumber);
3685 pendingRequest.need_dynamic_blklvl = true;
3686 }
3687 }
3688 }
3689 mPendingBuffersMap.last_frame_number = frameNumber;
3690 latestRequest = mPendingRequestsList.insert(
3691 mPendingRequestsList.end(), pendingRequest);
3692 if(mFlush) {
3693 pthread_mutex_unlock(&mMutex);
3694 return NO_ERROR;
3695 }
3696
3697 // Notify metadata channel we receive a request
3698 mMetadataChannel->request(NULL, frameNumber);
3699
3700 if(request->input_buffer != NULL){
3701 CDBG("%s: Input request, frame_number %d", __func__, frameNumber);
3702 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
3703 if (NO_ERROR != rc) {
3704 ALOGE("%s: fail to set reproc parameters", __func__);
3705 pthread_mutex_unlock(&mMutex);
3706 return rc;
3707 }
3708 }
3709
3710 // Call request on other streams
3711 uint32_t streams_need_metadata = 0;
3712 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
3713 for (size_t i = 0; i < request->num_output_buffers; i++) {
3714 const camera3_stream_buffer_t& output = request->output_buffers[i];
3715 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3716
3717 if (channel == NULL) {
3718 ALOGE("%s: invalid channel pointer for stream", __func__);
3719 continue;
3720 }
3721
3722 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3723 if(request->input_buffer != NULL){
3724 rc = channel->request(output.buffer, frameNumber,
3725 pInputBuffer, &mReprocMeta);
3726 if (rc < 0) {
3727 ALOGE("%s: Fail to request on picture channel", __func__);
3728 pthread_mutex_unlock(&mMutex);
3729 return rc;
3730 }
3731 } else {
3732 CDBG("%s: %d, snapshot request with buffer %p, frame_number %d", __func__,
3733 __LINE__, output.buffer, frameNumber);
3734 if (!request->settings) {
3735 rc = channel->request(output.buffer, frameNumber,
3736 NULL, mPrevParameters);
3737 } else {
3738 rc = channel->request(output.buffer, frameNumber,
3739 NULL, mParameters);
3740 }
3741 if (rc < 0) {
3742 ALOGE("%s: Fail to request on picture channel", __func__);
3743 pthread_mutex_unlock(&mMutex);
3744 return rc;
3745 }
3746 pendingBufferIter->need_metadata = true;
3747 streams_need_metadata++;
3748 }
3749 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
3750 bool needMetadata = false;
3751 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
3752 rc = yuvChannel->request(output.buffer, frameNumber,
3753 pInputBuffer,
3754 (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
3755 if (rc < 0) {
3756 ALOGE("%s: Fail to request on YUV channel", __func__);
3757 pthread_mutex_unlock(&mMutex);
3758 return rc;
3759 }
3760 pendingBufferIter->need_metadata = needMetadata;
3761 if (needMetadata)
3762 streams_need_metadata += 1;
3763 CDBG("%s: calling YUV channel request, need_metadata is %d",
3764 __func__, needMetadata);
3765 } else {
3766 CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
3767 __LINE__, output.buffer, frameNumber);
3768 rc = channel->request(output.buffer, frameNumber);
3769 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3770 && mBatchSize) {
3771 mToBeQueuedVidBufs++;
3772 if (mToBeQueuedVidBufs == mBatchSize) {
3773 channel->queueBatchBuf();
3774 }
3775 }
3776 if (rc < 0) {
3777 ALOGE("%s: request failed", __func__);
3778 pthread_mutex_unlock(&mMutex);
3779 return rc;
3780 }
3781 }
3782 pendingBufferIter++;
3783 }
3784
3785 //If 2 streams have need_metadata set to true, fail the request, unless
3786 //we copy/reference count the metadata buffer
3787 if (streams_need_metadata > 1) {
3788 ALOGE("%s: not supporting request in which two streams requires"
3789 " 2 HAL metadata for reprocessing", __func__);
3790 pthread_mutex_unlock(&mMutex);
3791 return -EINVAL;
3792 }
3793
3794 if(request->input_buffer == NULL) {
3795 /* Set the parameters to backend:
3796 * - For every request in NORMAL MODE
3797 * - For every request in HFR mode during preview only case
3798 * - Once every batch in HFR mode during video recording
3799 */
3800 if (!mBatchSize ||
3801 (mBatchSize && !isVidBufRequested) ||
3802 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
3803 CDBG("%s: set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
3804 __func__, mBatchSize, isVidBufRequested,
3805 mToBeQueuedVidBufs);
3806 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3807 mParameters);
3808 if (rc < 0) {
3809 ALOGE("%s: set_parms failed", __func__);
3810 }
3811 /* reset to zero coz, the batch is queued */
3812 mToBeQueuedVidBufs = 0;
3813 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
3814 }
3815 mPendingLiveRequest++;
3816 }
3817
3818 CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
3819
3820 mFirstRequest = false;
3821 // Added a timed condition wait
3822 struct timespec ts;
3823 uint8_t isValidTimeout = 1;
3824 rc = clock_gettime(CLOCK_REALTIME, &ts);
3825 if (rc < 0) {
3826 isValidTimeout = 0;
3827 ALOGE("%s: Error reading the real time clock!!", __func__);
3828 }
3829 else {
3830 // Make timeout as 5 sec for request to be honored
3831 ts.tv_sec += 5;
3832 }
3833 //Block on conditional variable
3834 if (mBatchSize) {
3835 /* For HFR, more buffers are dequeued upfront to improve the performance */
3836 minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3837 maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3838 }
3839
3840 // Do not block in the middle of a batch.
3841 while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer &&
3842 mToBeQueuedVidBufs == 0) {
3843 if (!isValidTimeout) {
3844 CDBG("%s: Blocking on conditional wait", __func__);
3845 pthread_cond_wait(&mRequestCond, &mMutex);
3846 }
3847 else {
3848 CDBG("%s: Blocking on timed conditional wait", __func__);
3849 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
3850 if (rc == ETIMEDOUT) {
3851 rc = -ENODEV;
3852 ALOGE("%s: Unblocked on timeout!!!!", __func__);
3853 break;
3854 }
3855 }
3856 CDBG("%s: Unblocked", __func__);
3857 if (mWokenUpByDaemon) {
3858 mWokenUpByDaemon = false;
3859 if (mPendingLiveRequest < maxInFlightRequests)
3860 break;
3861 }
3862 }
3863 pthread_mutex_unlock(&mMutex);
3864
3865 return rc;
3866 }
3867
/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION: Dump current HAL3 state (pending requests, pending buffers
 *              and pending frame drops) to the given file descriptor.
 *
 * PARAMETERS :
 *   @fd      : file descriptor to write the dump output to
 *
 * RETURN     : NONE
 *==========================================================================*/
dump(int fd)3878 void QCamera3HardwareInterface::dump(int fd)
3879 {
3880 pthread_mutex_lock(&mMutex);
3881 dprintf(fd, "\n Camera HAL3 information Begin \n");
3882
3883 dprintf(fd, "\nNumber of pending requests: %zu \n",
3884 mPendingRequestsList.size());
3885 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3886 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
3887 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3888 for(pendingRequestIterator i = mPendingRequestsList.begin();
3889 i != mPendingRequestsList.end(); i++) {
3890 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
3891 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
3892 i->input_buffer);
3893 }
3894 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
3895 mPendingBuffersMap.num_buffers);
3896 dprintf(fd, "-------+------------------\n");
3897 dprintf(fd, " Frame | Stream type mask \n");
3898 dprintf(fd, "-------+------------------\n");
3899 for(List<PendingBufferInfo>::iterator i =
3900 mPendingBuffersMap.mPendingBufferList.begin();
3901 i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
3902 QCamera3Channel *channel = (QCamera3Channel *)(i->stream->priv);
3903 dprintf(fd, " %5d | %11d \n",
3904 i->frame_number, channel->getStreamTypeMask());
3905 }
3906 dprintf(fd, "-------+------------------\n");
3907
3908 dprintf(fd, "\nPending frame drop list: %zu\n",
3909 mPendingFrameDropList.size());
3910 dprintf(fd, "-------+-----------\n");
3911 dprintf(fd, " Frame | Stream ID \n");
3912 dprintf(fd, "-------+-----------\n");
3913 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
3914 i != mPendingFrameDropList.end(); i++) {
3915 dprintf(fd, " %5d | %9d \n",
3916 i->frame_number, i->stream_ID);
3917 }
3918 dprintf(fd, "-------+-----------\n");
3919
3920 dprintf(fd, "\n Camera HAL3 information End \n");
3921
3922 /* use dumpsys media.camera as trigger to send update debug level event */
3923 mUpdateDebugLevel = true;
3924 pthread_mutex_unlock(&mMutex);
3925 return;
3926 }
3927
/*===========================================================================
 * FUNCTION   : flush
 *
 * DESCRIPTION: Stop all channels, return pending requests to the framework
 *              with error, then restart the channels so streaming can resume.
 *
 * PARAMETERS : None
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero  -- failure code
 *==========================================================================*/
flush()3938 int QCamera3HardwareInterface::flush()
3939 {
3940 ATRACE_CALL();
3941 int32_t rc = NO_ERROR;
3942
3943 CDBG("%s: Unblocking Process Capture Request", __func__);
3944 pthread_mutex_lock(&mMutex);
3945
3946 if (mFirstRequest) {
3947 pthread_mutex_unlock(&mMutex);
3948 return NO_ERROR;
3949 }
3950
3951 mFlush = true;
3952 pthread_mutex_unlock(&mMutex);
3953
3954 rc = stopAllChannels();
3955 if (rc < 0) {
3956 ALOGE("%s: stopAllChannels failed", __func__);
3957 return rc;
3958 }
3959 if (mChannelHandle) {
3960 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
3961 mChannelHandle);
3962 }
3963
3964 // Reset bundle info
3965 rc = setBundleInfo();
3966 if (rc < 0) {
3967 ALOGE("%s: setBundleInfo failed %d", __func__, rc);
3968 return rc;
3969 }
3970
3971 // Mutex Lock
3972 pthread_mutex_lock(&mMutex);
3973
3974 // Unblock process_capture_request
3975 mPendingLiveRequest = 0;
3976 pthread_cond_signal(&mRequestCond);
3977
3978 rc = notifyErrorForPendingRequests();
3979 if (rc < 0) {
3980 ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
3981 pthread_mutex_unlock(&mMutex);
3982 return rc;
3983 }
3984
3985 mFlush = false;
3986
3987 // Start the Streams/Channels
3988 rc = startAllChannels();
3989 if (rc < 0) {
3990 ALOGE("%s: startAllChannels failed", __func__);
3991 pthread_mutex_unlock(&mMutex);
3992 return rc;
3993 }
3994
3995 if (mChannelHandle) {
3996 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3997 mChannelHandle);
3998 if (rc < 0) {
3999 ALOGE("%s: start_channel failed", __func__);
4000 pthread_mutex_unlock(&mMutex);
4001 return rc;
4002 }
4003 }
4004
4005 pthread_mutex_unlock(&mMutex);
4006
4007 return 0;
4008 }
4009
4010 /*===========================================================================
4011 * FUNCTION : captureResultCb
4012 *
4013 * DESCRIPTION: Callback handler for all capture result
4014 * (streams, as well as metadata)
4015 *
4016 * PARAMETERS :
4017 * @metadata : metadata information
4018 * @buffer : actual gralloc buffer to be returned to frameworks.
4019 * NULL if metadata.
4020 *
4021 * RETURN : NONE
4022 *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata_buf,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer)4023 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4024 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4025 {
4026 if (metadata_buf) {
4027 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
4028 handleBatchMetadata(metadata_buf,
4029 true /* free_and_bufdone_meta_buf */);
4030 } else { /* mBatchSize = 0 */
4031 hdrPlusPerfLock(metadata_buf);
4032 pthread_mutex_lock(&mMutex);
4033 handleMetadataWithLock(metadata_buf,
4034 true /* free_and_bufdone_meta_buf */,
4035 false /* first frame of batch metadata */ );
4036 pthread_mutex_unlock(&mMutex);
4037 }
4038 } else if (isInputBuffer) {
4039 pthread_mutex_lock(&mMutex);
4040 handleInputBufferWithLock(frame_number);
4041 pthread_mutex_unlock(&mMutex);
4042 } else {
4043 pthread_mutex_lock(&mMutex);
4044 handleBufferWithLock(buffer, frame_number);
4045 pthread_mutex_unlock(&mMutex);
4046 }
4047 return;
4048 }
4049
4050 /*===========================================================================
4051 * FUNCTION : getReprocessibleOutputStreamId
4052 *
4053 * DESCRIPTION: Get source output stream id for the input reprocess stream
4054 * based on size and format, which would be the largest
4055 * output stream if an input stream exists.
4056 *
4057 * PARAMETERS :
4058 * @id : return the stream id if found
4059 *
4060 * RETURN : int32_t type of status
4061 * NO_ERROR -- success
4062 * none-zero failure code
4063 *==========================================================================*/
getReprocessibleOutputStreamId(uint32_t & id)4064 int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4065 {
4066 stream_info_t* stream = NULL;
4067
4068 /* check if any output or bidirectional stream with the same size and format
4069 and return that stream */
4070 if ((mInputStreamInfo.dim.width > 0) &&
4071 (mInputStreamInfo.dim.height > 0)) {
4072 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4073 it != mStreamInfo.end(); it++) {
4074
4075 camera3_stream_t *stream = (*it)->stream;
4076 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4077 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4078 (stream->format == mInputStreamInfo.format)) {
4079 // Usage flag for an input stream and the source output stream
4080 // may be different.
4081 CDBG("%s: Found reprocessible output stream! %p", __func__, *it);
4082 CDBG("%s: input stream usage 0x%x, current stream usage 0x%x",
4083 __func__, stream->usage, mInputStreamInfo.usage);
4084
4085 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4086 if (channel != NULL && channel->mStreams[0]) {
4087 id = channel->mStreams[0]->getMyServerID();
4088 return NO_ERROR;
4089 }
4090 }
4091 }
4092 } else {
4093 CDBG("%s: No input stream, so no reprocessible output stream", __func__);
4094 }
4095 return NAME_NOT_FOUND;
4096 }
4097
4098 /*===========================================================================
4099 * FUNCTION : lookupFwkName
4100 *
4101 * DESCRIPTION: In case the enum is not same in fwk and backend
4102 * make sure the parameter is correctly propogated
4103 *
4104 * PARAMETERS :
4105 * @arr : map between the two enums
4106 * @len : len of the map
4107 * @hal_name : name of the hal_parm to map
4108 *
4109 * RETURN : int type of status
4110 * fwk_name -- success
4111 * none-zero failure code
4112 *==========================================================================*/
lookupFwkName(const mapType * arr,size_t len,halType hal_name)4113 template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4114 size_t len, halType hal_name)
4115 {
4116
4117 for (size_t i = 0; i < len; i++) {
4118 if (arr[i].hal_name == hal_name) {
4119 return arr[i].fwk_name;
4120 }
4121 }
4122
4123 /* Not able to find matching framework type is not necessarily
4124 * an error case. This happens when mm-camera supports more attributes
4125 * than the frameworks do */
4126 CDBG_HIGH("%s: Cannot find matching framework type", __func__);
4127 return NAME_NOT_FOUND;
4128 }
4129
4130 /*===========================================================================
4131 * FUNCTION : lookupHalName
4132 *
4133 * DESCRIPTION: In case the enum is not same in fwk and backend
4134 * make sure the parameter is correctly propogated
4135 *
4136 * PARAMETERS :
4137 * @arr : map between the two enums
4138 * @len : len of the map
4139 * @fwk_name : name of the hal_parm to map
4140 *
4141 * RETURN : int32_t type of status
4142 * hal_name -- success
4143 * none-zero failure code
4144 *==========================================================================*/
lookupHalName(const mapType * arr,size_t len,fwkType fwk_name)4145 template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
4146 size_t len, fwkType fwk_name)
4147 {
4148 for (size_t i = 0; i < len; i++) {
4149 if (arr[i].fwk_name == fwk_name) {
4150 return arr[i].hal_name;
4151 }
4152 }
4153
4154 ALOGE("%s: Cannot find matching hal type fwk_name=%d", __func__, fwk_name);
4155 return NAME_NOT_FOUND;
4156 }
4157
4158 /*===========================================================================
4159 * FUNCTION : lookupProp
4160 *
4161 * DESCRIPTION: lookup a value by its name
4162 *
4163 * PARAMETERS :
4164 * @arr : map between the two enums
4165 * @len : size of the map
4166 * @name : name to be looked up
4167 *
4168 * RETURN : Value if found
4169 * CAM_CDS_MODE_MAX if not found
4170 *==========================================================================*/
lookupProp(const mapType * arr,size_t len,const char * name)4171 template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4172 size_t len, const char *name)
4173 {
4174 if (name) {
4175 for (size_t i = 0; i < len; i++) {
4176 if (!strcmp(arr[i].desc, name)) {
4177 return arr[i].val;
4178 }
4179 }
4180 }
4181 return CAM_CDS_MODE_MAX;
4182 }
4183
/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate a HAL metadata buffer into framework metadata
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *   @timestamp: metadata buffer timestamp
 *   @request_id: request id
 *   @jpegMetadata: additional jpeg metadata
 *   @pipeline_depth: pipeline depth for this request
 *   @capture_intent: capture intent for this request
 *   @hybrid_ae_enable: whether hybrid ae is enabled
 *   @pprocDone: whether internal offline postprocessing is done
 *   @dynamic_blklvl: whether dynamic black level is needed
 *   @firstMetadataInBatch: whether this is the first metadata in a batch
 *
 * RETURN : camera_metadata_t*
 * metadata in a format specified by fwk
 *==========================================================================*/
4199 camera_metadata_t*
translateFromHalMetadata(metadata_buffer_t * metadata,nsecs_t timestamp,int32_t request_id,const CameraMetadata & jpegMetadata,uint8_t pipeline_depth,uint8_t capture_intent,uint8_t hybrid_ae_enable,bool pprocDone,bool dynamic_blklvl,bool firstMetadataInBatch)4200 QCamera3HardwareInterface::translateFromHalMetadata(
4201 metadata_buffer_t *metadata,
4202 nsecs_t timestamp,
4203 int32_t request_id,
4204 const CameraMetadata& jpegMetadata,
4205 uint8_t pipeline_depth,
4206 uint8_t capture_intent,
4207 uint8_t hybrid_ae_enable,
4208 bool pprocDone,
4209 bool dynamic_blklvl,
4210 bool firstMetadataInBatch)
4211 {
4212 CameraMetadata camMetadata;
4213 camera_metadata_t *resultMetadata;
4214
4215 if (mBatchSize && !firstMetadataInBatch) {
4216 /* In batch mode, use cached metadata from the first metadata
4217 in the batch */
4218 camMetadata.clear();
4219 camMetadata = mCachedMetadata;
4220 }
4221
4222 if (jpegMetadata.entryCount())
4223 camMetadata.append(jpegMetadata);
4224
4225 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, ×tamp, 1);
4226 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4227 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4228 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4229 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
4230
4231 if (mBatchSize && !firstMetadataInBatch) {
4232 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
4233 resultMetadata = camMetadata.release();
4234 return resultMetadata;
4235 }
4236
4237 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
4238 int64_t fwk_frame_number = *frame_number;
4239 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
4240 }
4241
4242 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
4243 int32_t fps_range[2];
4244 fps_range[0] = (int32_t)float_range->min_fps;
4245 fps_range[1] = (int32_t)float_range->max_fps;
4246 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
4247 fps_range, 2);
4248 CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
4249 __func__, fps_range[0], fps_range[1]);
4250 }
4251
4252 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
4253 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
4254 }
4255
4256 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4257 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
4258 METADATA_MAP_SIZE(SCENE_MODES_MAP),
4259 *sceneMode);
4260 if (NAME_NOT_FOUND != val) {
4261 uint8_t fwkSceneMode = (uint8_t)val;
4262 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
4263 CDBG("%s: urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
4264 __func__, fwkSceneMode);
4265 }
4266 }
4267
4268 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
4269 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
4270 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
4271 }
4272
4273 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
4274 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
4275 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
4276 }
4277
4278 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
4279 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
4280 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
4281 }
4282
4283 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
4284 CAM_INTF_META_EDGE_MODE, metadata) {
4285 uint8_t edgeStrength = (uint8_t) edgeApplication->sharpness;
4286 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
4287 }
4288
4289 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
4290 uint8_t fwk_flashPower = (uint8_t) *flashPower;
4291 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
4292 }
4293
4294 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
4295 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
4296 }
4297
4298 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
4299 if (0 <= *flashState) {
4300 uint8_t fwk_flashState = (uint8_t) *flashState;
4301 if (!gCamCapability[mCameraId]->flash_available) {
4302 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
4303 }
4304 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
4305 }
4306 }
4307
4308 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
4309 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
4310 if (NAME_NOT_FOUND != val) {
4311 uint8_t fwk_flashMode = (uint8_t)val;
4312 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
4313 }
4314 }
4315
4316 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
4317 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
4318 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
4319 }
4320
4321 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
4322 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
4323 }
4324
4325 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
4326 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
4327 }
4328
4329 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
4330 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
4331 }
4332
4333 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
4334 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
4335 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
4336 }
4337
4338 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
4339 uint8_t fwk_videoStab = (uint8_t) *videoStab;
4340 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
4341 }
4342
4343 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
4344 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
4345 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
4346 }
4347
4348 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
4349 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
4350 }
4351
4352 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
4353 CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
4354
4355 CDBG("%s: dynamicblackLevel = %f %f %f %f", __func__,
4356 blackLevelSourcePattern->cam_black_level[0],
4357 blackLevelSourcePattern->cam_black_level[1],
4358 blackLevelSourcePattern->cam_black_level[2],
4359 blackLevelSourcePattern->cam_black_level[3]);
4360 }
4361
4362 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
4363 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
4364 float fwk_blackLevelInd[4];
4365
4366 fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
4367 fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
4368 fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
4369 fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
4370
4371 CDBG("%s: applied dynamicblackLevel = %f %f %f %f", __func__,
4372 blackLevelAppliedPattern->cam_black_level[0],
4373 blackLevelAppliedPattern->cam_black_level[1],
4374 blackLevelAppliedPattern->cam_black_level[2],
4375 blackLevelAppliedPattern->cam_black_level[3]);
4376 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
4377 camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4378
4379 // if dynmaic_blklvl is true, we calculate blklvl from raw callback
4380 // otherwise, use the value from linearization LUT.
4381 if (dynamic_blklvl == false) {
4382 // Need convert the internal 16 bit depth to sensor 10 bit sensor raw
4383 // depth space.
4384 fwk_blackLevelInd[0] /= 64.0;
4385 fwk_blackLevelInd[1] /= 64.0;
4386 fwk_blackLevelInd[2] /= 64.0;
4387 fwk_blackLevelInd[3] /= 64.0;
4388 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4389 }
4390 }
4391
4392 // Fixed whitelevel is used by ISP/Sensor
4393 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
4394 &gCamCapability[mCameraId]->white_level, 1);
4395
4396 if (gCamCapability[mCameraId]->optical_black_region_count != 0 &&
4397 gCamCapability[mCameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
4398 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
4399 for (size_t i = 0; i < gCamCapability[mCameraId]->optical_black_region_count * 4; i++) {
4400 opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
4401 }
4402 camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_INFO_OPTICALLY_SHIELDED_REGIONS,
4403 opticalBlackRegions, gCamCapability[mCameraId]->optical_black_region_count * 4);
4404 }
4405
4406 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
4407 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
4408 int32_t scalerCropRegion[4];
4409 scalerCropRegion[0] = hScalerCropRegion->left;
4410 scalerCropRegion[1] = hScalerCropRegion->top;
4411 scalerCropRegion[2] = hScalerCropRegion->width;
4412 scalerCropRegion[3] = hScalerCropRegion->height;
4413
4414 // Adjust crop region from sensor output coordinate system to active
4415 // array coordinate system.
4416 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
4417 scalerCropRegion[2], scalerCropRegion[3]);
4418
4419 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
4420 }
4421
4422 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
4423 CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
4424 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
4425 }
4426
4427 IF_META_AVAILABLE(int64_t, sensorFameDuration,
4428 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
4429 CDBG("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
4430 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
4431 }
4432
4433 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
4434 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
4435 CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
4436 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
4437 sensorRollingShutterSkew, 1);
4438 }
4439
4440 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
4441 CDBG("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
4442 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
4443
4444 //calculate the noise profile based on sensitivity
4445 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
4446 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
4447 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
4448 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
4449 noise_profile[i] = noise_profile_S;
4450 noise_profile[i+1] = noise_profile_O;
4451 }
4452 CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
4453 noise_profile_S, noise_profile_O);
4454 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
4455 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
4456 }
4457
4458 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
4459 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
4460 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
4461 }
4462
4463 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
4464 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
4465 *faceDetectMode);
4466 if (NAME_NOT_FOUND != val) {
4467 uint8_t fwk_faceDetectMode = (uint8_t)val;
4468 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
4469
4470 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4471 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
4472 CAM_INTF_META_FACE_DETECTION, metadata) {
4473 uint8_t numFaces = MIN(
4474 faceDetectionInfo->num_faces_detected, MAX_ROI);
4475 int32_t faceIds[MAX_ROI];
4476 uint8_t faceScores[MAX_ROI];
4477 int32_t faceRectangles[MAX_ROI * 4];
4478 int32_t faceLandmarks[MAX_ROI * 6];
4479 size_t j = 0, k = 0;
4480
4481 for (size_t i = 0; i < numFaces; i++) {
4482 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
4483 // Adjust crop region from sensor output coordinate system to active
4484 // array coordinate system.
4485 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
4486 mCropRegionMapper.toActiveArray(rect.left, rect.top,
4487 rect.width, rect.height);
4488
4489 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
4490 faceRectangles+j, -1);
4491
4492 // Map the co-ordinate sensor output coordinate system to active
4493 // array coordinate system.
4494 cam_face_detection_info_t& face = faceDetectionInfo->faces[i];
4495 mCropRegionMapper.toActiveArray(face.left_eye_center.x,
4496 face.left_eye_center.y);
4497 mCropRegionMapper.toActiveArray(face.right_eye_center.x,
4498 face.right_eye_center.y);
4499 mCropRegionMapper.toActiveArray(face.mouth_center.x,
4500 face.mouth_center.y);
4501
4502 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
4503 j+= 4;
4504 k+= 6;
4505 }
4506 if (numFaces <= 0) {
4507 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
4508 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
4509 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
4510 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
4511 }
4512
4513 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
4514 numFaces);
4515 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
4516 faceRectangles, numFaces * 4U);
4517 if (fwk_faceDetectMode ==
4518 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
4519 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
4520 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
4521 faceLandmarks, numFaces * 6U);
4522 }
4523 }
4524 }
4525 }
4526 }
4527
4528 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
4529 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
4530 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
4531 }
4532
4533 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
4534 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
4535 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
4536 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
4537 }
4538
4539 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
4540 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
4541 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
4542 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
4543 }
4544
4545 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
4546 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
4547 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
4548 CAM_MAX_SHADING_MAP_HEIGHT);
4549 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
4550 CAM_MAX_SHADING_MAP_WIDTH);
4551 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
4552 lensShadingMap->lens_shading, 4U * map_width * map_height);
4553 }
4554
4555 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
4556 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
4557 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
4558 }
4559
4560 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
4561 //Populate CAM_INTF_META_TONEMAP_CURVES
4562 /* ch0 = G, ch 1 = B, ch 2 = R*/
4563 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4564 ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4565 __func__, tonemap->tonemap_points_cnt,
4566 CAM_MAX_TONEMAP_CURVE_SIZE);
4567 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4568 }
4569
4570 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
4571 &tonemap->curves[0].tonemap_points[0][0],
4572 tonemap->tonemap_points_cnt * 2);
4573
4574 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
4575 &tonemap->curves[1].tonemap_points[0][0],
4576 tonemap->tonemap_points_cnt * 2);
4577
4578 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
4579 &tonemap->curves[2].tonemap_points[0][0],
4580 tonemap->tonemap_points_cnt * 2);
4581 }
4582
4583 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
4584 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
4585 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
4586 CC_GAINS_COUNT);
4587 }
4588
4589 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
4590 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
4591 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
4592 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
4593 CC_MATRIX_COLS * CC_MATRIX_ROWS);
4594 }
4595
4596 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
4597 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
4598 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4599 ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4600 __func__, toneCurve->tonemap_points_cnt,
4601 CAM_MAX_TONEMAP_CURVE_SIZE);
4602 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4603 }
4604 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
4605 (float*)toneCurve->curve.tonemap_points,
4606 toneCurve->tonemap_points_cnt * 2);
4607 }
4608
4609 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
4610 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
4611 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
4612 predColorCorrectionGains->gains, 4);
4613 }
4614
4615 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
4616 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
4617 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4618 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
4619 CC_MATRIX_ROWS * CC_MATRIX_COLS);
4620 }
4621
4622 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
4623 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
4624 }
4625
4626 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
4627 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
4628 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
4629 }
4630
4631 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
4632 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
4633 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
4634 }
4635
4636 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
4637 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
4638 *effectMode);
4639 if (NAME_NOT_FOUND != val) {
4640 uint8_t fwk_effectMode = (uint8_t)val;
4641 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
4642 }
4643 }
4644
4645 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
4646 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
4647 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
4648 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
4649 if (NAME_NOT_FOUND != fwk_testPatternMode) {
4650 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
4651 }
4652 int32_t fwk_testPatternData[4];
4653 fwk_testPatternData[0] = testPatternData->r;
4654 fwk_testPatternData[3] = testPatternData->b;
4655 switch (gCamCapability[mCameraId]->color_arrangement) {
4656 case CAM_FILTER_ARRANGEMENT_RGGB:
4657 case CAM_FILTER_ARRANGEMENT_GRBG:
4658 fwk_testPatternData[1] = testPatternData->gr;
4659 fwk_testPatternData[2] = testPatternData->gb;
4660 break;
4661 case CAM_FILTER_ARRANGEMENT_GBRG:
4662 case CAM_FILTER_ARRANGEMENT_BGGR:
4663 fwk_testPatternData[2] = testPatternData->gr;
4664 fwk_testPatternData[1] = testPatternData->gb;
4665 break;
4666 default:
4667 ALOGE("%s: color arrangement %d is not supported", __func__,
4668 gCamCapability[mCameraId]->color_arrangement);
4669 break;
4670 }
4671 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
4672 }
4673
4674 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
4675 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
4676 }
4677
4678 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
4679 String8 str((const char *)gps_methods);
4680 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
4681 }
4682
4683 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
4684 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
4685 }
4686
4687 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
4688 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
4689 }
4690
4691 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
4692 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
4693 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
4694 }
4695
4696 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
4697 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
4698 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
4699 }
4700
4701 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
4702 int32_t fwk_thumb_size[2];
4703 fwk_thumb_size[0] = thumb_size->width;
4704 fwk_thumb_size[1] = thumb_size->height;
4705 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
4706 }
4707
4708 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
4709 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
4710 privateData,
4711 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
4712 }
4713
4714 if (metadata->is_tuning_params_valid) {
4715 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
4716 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
4717 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
4718
4719
4720 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
4721 sizeof(uint32_t));
4722 data += sizeof(uint32_t);
4723
4724 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
4725 sizeof(uint32_t));
4726 CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
4727 data += sizeof(uint32_t);
4728
4729 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
4730 sizeof(uint32_t));
4731 CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
4732 data += sizeof(uint32_t);
4733
4734 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
4735 sizeof(uint32_t));
4736 CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
4737 data += sizeof(uint32_t);
4738
4739 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
4740 sizeof(uint32_t));
4741 CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
4742 data += sizeof(uint32_t);
4743
4744 metadata->tuning_params.tuning_mod3_data_size = 0;
4745 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
4746 sizeof(uint32_t));
4747 CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
4748 data += sizeof(uint32_t);
4749
4750 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
4751 TUNING_SENSOR_DATA_MAX);
4752 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
4753 count);
4754 data += count;
4755
4756 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
4757 TUNING_VFE_DATA_MAX);
4758 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
4759 count);
4760 data += count;
4761
4762 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
4763 TUNING_CPP_DATA_MAX);
4764 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
4765 count);
4766 data += count;
4767
4768 count = MIN(metadata->tuning_params.tuning_cac_data_size,
4769 TUNING_CAC_DATA_MAX);
4770 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
4771 count);
4772 data += count;
4773
4774 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
4775 (int32_t *)(void *)tuning_meta_data_blob,
4776 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
4777 }
4778
4779 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
4780 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
4781 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
4782 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
4783 NEUTRAL_COL_POINTS);
4784 }
4785
4786 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
4787 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
4788 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
4789 }
4790
4791 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
4792 int32_t aeRegions[REGIONS_TUPLE_COUNT];
4793 // Adjust crop region from sensor output coordinate system to active
4794 // array coordinate system.
4795 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
4796 hAeRegions->rect.width, hAeRegions->rect.height);
4797
4798 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
4799 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
4800 REGIONS_TUPLE_COUNT);
4801 CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4802 __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
4803 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
4804 hAeRegions->rect.height);
4805 }
4806
4807 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
4808 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
4809 if (NAME_NOT_FOUND != val) {
4810 uint8_t fwkAfMode = (uint8_t)val;
4811 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
4812 CDBG("%s: Metadata : ANDROID_CONTROL_AF_MODE %d", __func__, val);
4813 } else {
4814 CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_AF_MODE %d",
4815 __func__, val);
4816 }
4817 }
4818
4819 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
4820 uint8_t fwk_afState = (uint8_t) *afState;
4821 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
4822 CDBG("%s: Metadata : ANDROID_CONTROL_AF_STATE %u", __func__, *afState);
4823 }
4824
4825 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
4826 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
4827 }
4828
4829 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
4830 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
4831 }
4832
4833 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
4834 uint8_t fwk_lensState = *lensState;
4835 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
4836 }
4837
4838 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
4839 /*af regions*/
4840 int32_t afRegions[REGIONS_TUPLE_COUNT];
4841 // Adjust crop region from sensor output coordinate system to active
4842 // array coordinate system.
4843 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
4844 hAfRegions->rect.width, hAfRegions->rect.height);
4845
4846 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
4847 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
4848 REGIONS_TUPLE_COUNT);
4849 CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4850 __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
4851 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
4852 hAfRegions->rect.height);
4853 }
4854
4855 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
4856 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
4857 *hal_ab_mode);
4858 if (NAME_NOT_FOUND != val) {
4859 uint8_t fwk_ab_mode = (uint8_t)val;
4860 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
4861 }
4862 }
4863
4864 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4865 int val = lookupFwkName(SCENE_MODES_MAP,
4866 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
4867 if (NAME_NOT_FOUND != val) {
4868 uint8_t fwkBestshotMode = (uint8_t)val;
4869 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
4870 CDBG("%s: Metadata : ANDROID_CONTROL_SCENE_MODE", __func__);
4871 } else {
4872 CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_SCENE_MODE", __func__);
4873 }
4874 }
4875
4876 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
4877 uint8_t fwk_mode = (uint8_t) *mode;
4878 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
4879 }
4880
4881 /* Constant metadata values to be update*/
4882 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
4883 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
4884
4885 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4886 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4887
4888 int32_t hotPixelMap[2];
4889 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
4890
4891 // CDS
4892 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
4893 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
4894 }
4895
4896 // TNR
4897 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
4898 uint8_t tnr_enable = tnr->denoise_enable;
4899 int32_t tnr_process_type = (int32_t)tnr->process_plates;
4900
4901 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
4902 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
4903 }
4904
4905 // Reprocess crop data
4906 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
4907 uint8_t cnt = crop_data->num_of_streams;
4908 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
4909 // mm-qcamera-daemon only posts crop_data for streams
4910 // not linked to pproc. So no valid crop metadata is not
4911 // necessarily an error case.
4912 CDBG("%s: No valid crop metadata entries", __func__);
4913 } else {
4914 uint32_t reproc_stream_id;
4915 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
4916 CDBG("%s: No reprocessible stream found, ignore crop data", __func__);
4917 } else {
4918 int rc = NO_ERROR;
4919 Vector<int32_t> roi_map;
4920 int32_t *crop = new int32_t[cnt*4];
4921 if (NULL == crop) {
4922 rc = NO_MEMORY;
4923 }
4924 if (NO_ERROR == rc) {
4925 int32_t streams_found = 0;
4926 for (size_t i = 0; i < cnt; i++) {
4927 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
4928 if (pprocDone) {
4929 // HAL already does internal reprocessing,
4930 // either via reprocessing before JPEG encoding,
4931 // or offline postprocessing for pproc bypass case.
4932 crop[0] = 0;
4933 crop[1] = 0;
4934 crop[2] = mInputStreamInfo.dim.width;
4935 crop[3] = mInputStreamInfo.dim.height;
4936 } else {
4937 crop[0] = crop_data->crop_info[i].crop.left;
4938 crop[1] = crop_data->crop_info[i].crop.top;
4939 crop[2] = crop_data->crop_info[i].crop.width;
4940 crop[3] = crop_data->crop_info[i].crop.height;
4941 }
4942 roi_map.add(crop_data->crop_info[i].roi_map.left);
4943 roi_map.add(crop_data->crop_info[i].roi_map.top);
4944 roi_map.add(crop_data->crop_info[i].roi_map.width);
4945 roi_map.add(crop_data->crop_info[i].roi_map.height);
4946 streams_found++;
4947 CDBG("%s: Adding reprocess crop data for stream %dx%d, %dx%d",
4948 __func__,
4949 crop[0], crop[1], crop[2], crop[3]);
4950 CDBG("%s: Adding reprocess crop roi map for stream %dx%d, %dx%d",
4951 __func__,
4952 crop_data->crop_info[i].roi_map.left,
4953 crop_data->crop_info[i].roi_map.top,
4954 crop_data->crop_info[i].roi_map.width,
4955 crop_data->crop_info[i].roi_map.height);
4956 break;
4957
4958 }
4959 }
4960 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
4961 &streams_found, 1);
4962 camMetadata.update(QCAMERA3_CROP_REPROCESS,
4963 crop, (size_t)(streams_found * 4));
4964 if (roi_map.array()) {
4965 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
4966 roi_map.array(), roi_map.size());
4967 }
4968 }
4969 if (crop) {
4970 delete [] crop;
4971 }
4972 }
4973 }
4974 }
4975
4976 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
4977 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
4978 *cacMode);
4979 if (NAME_NOT_FOUND != val) {
4980 uint8_t fwkCacMode = (uint8_t)val;
4981 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
4982 } else {
4983 ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode);
4984 }
4985 }
4986
4987 // Post blob of cam_cds_data through vendor tag.
4988 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
4989 uint8_t cnt = cdsInfo->num_of_streams;
4990 cam_cds_data_t cdsDataOverride;
4991 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
4992 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
4993 cdsDataOverride.num_of_streams = 1;
4994 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
4995 uint32_t reproc_stream_id;
4996 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
4997 CDBG("%s: No reprocessible stream found, ignore cds data", __func__);
4998 } else {
4999 for (size_t i = 0; i < cnt; i++) {
5000 if (cdsInfo->cds_info[i].stream_id ==
5001 reproc_stream_id) {
5002 cdsDataOverride.cds_info[0].cds_enable =
5003 cdsInfo->cds_info[i].cds_enable;
5004 break;
5005 }
5006 }
5007 }
5008 } else {
5009 CDBG("%s: Invalid stream count %d in CDS_DATA", __func__, cnt);
5010 }
5011 camMetadata.update(QCAMERA3_CDS_INFO,
5012 (uint8_t *)&cdsDataOverride,
5013 sizeof(cam_cds_data_t));
5014 }
5015
5016 // Ldaf calibration data
5017 if (!mLdafCalibExist) {
5018 IF_META_AVAILABLE(uint32_t, ldafCalib,
5019 CAM_INTF_META_LDAF_EXIF, metadata) {
5020 mLdafCalibExist = true;
5021 mLdafCalib[0] = ldafCalib[0];
5022 mLdafCalib[1] = ldafCalib[1];
5023 CDBG("%s: ldafCalib[0] is %d, ldafCalib[1] is %d", __func__,
5024 ldafCalib[0], ldafCalib[1]);
5025 }
5026 }
5027
5028 // Post Raw Sensitivity Boost = ISP digital gain
5029 IF_META_AVAILABLE(float, ispDigitalGain, CAM_INTF_META_ISP_DIGITAL_GAIN, metadata) {
5030 int32_t postRawSensitivity = static_cast<int32_t>(*ispDigitalGain * 100);
5031 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &postRawSensitivity, 1);
5032 }
5033
5034 /* In batch mode, cache the first metadata in the batch */
5035 if (mBatchSize && firstMetadataInBatch) {
5036 mCachedMetadata.clear();
5037 mCachedMetadata = camMetadata;
5038 }
5039
5040 resultMetadata = camMetadata.release();
5041 return resultMetadata;
5042 }
5043
5044 /*===========================================================================
5045 * FUNCTION : saveExifParams
5046 *
 * DESCRIPTION: Caches the 3A (AE/AWB/AF/ASD/stats) EXIF debug parameter
 *              blobs from the metadata callback into mExifParams.
5048 *
5049 * PARAMETERS :
5050 * @metadata : metadata information from callback
5051 *
5052 * RETURN : none
5053 *
5054 *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // For each 3A debug blob present in this metadata buffer, copy it into
    // mExifParams and raise the matching *_valid flag. The cached blobs are
    // later retrieved via get3AExifParams() for EXIF debug data in JPEGs.
    // Blobs absent from the buffer leave the previously cached value (and
    // its valid flag) untouched.
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        mExifParams.ae_debug_params = *ae_exif_debug_params;
        mExifParams.ae_debug_params_valid = TRUE;
    }
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        mExifParams.awb_debug_params = *awb_exif_debug_params;
        mExifParams.awb_debug_params_valid = TRUE;
    }
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        mExifParams.af_debug_params = *af_exif_debug_params;
        mExifParams.af_debug_params_valid = TRUE;
    }
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        mExifParams.asd_debug_params = *asd_exif_debug_params;
        mExifParams.asd_debug_params_valid = TRUE;
    }
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        mExifParams.stats_debug_params = *stats_exif_debug_params;
        mExifParams.stats_debug_params_valid = TRUE;
    }
}
5083
5084 /*===========================================================================
5085 * FUNCTION : get3AExifParams
5086 *
 * DESCRIPTION: Return the 3A EXIF debug parameters most recently cached
 *              by saveExifParams()
5088 *
5089 * PARAMETERS : none
5090 *
5091 *
5092 * RETURN : mm_jpeg_exif_params_t
5093 *
5094 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Returns by value the 3A EXIF debug parameters cached by
    // saveExifParams(); the *_valid flags indicate which blobs are fresh.
    return mExifParams;
}
5099
5100 /*===========================================================================
5101 * FUNCTION : translateCbUrgentMetadataToResultMetadata
5102 *
 * DESCRIPTION: Translate the urgent (partial) 3A metadata from the HAL
 *              callback into framework result metadata
5104 *
5105 * PARAMETERS :
5106 * @metadata : metadata information from callback
5107 *
5108 * RETURN : camera_metadata_t*
5109 * metadata in a format specified by fwk
5110 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;

    // AWB state: HAL enum values map 1:1 onto the framework's uint8 enum.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", __func__, *whiteBalanceState);
    }

    // Echo the AE precapture trigger and its id back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        CDBG("%s: urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                __func__, aecTrigger->trigger);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d", __func__,
                aecTrigger->trigger_id);
    }

    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE %u", __func__, *ae_state);
    }

    // Echo the AF trigger and its id back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        CDBG("%s: urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                __func__, af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d", __func__,
                af_trigger->trigger_id);
    }

    // AWB mode needs a table lookup since HAL and framework enums differ.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", __func__, val);
        } else {
            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AWB_MODE", __func__);
        }
    }

    // The framework AE mode is deduced from three independent HAL fields.
    // Initialize each to its "not present" sentinel so absent metadata
    // falls through the priority chain below.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    // Priority order matters: redeye reduction overrides flash mode, which
    // overrides the plain AE on/off mode. Only the first match is reported.
    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            ALOGE("%s: Unsupported flash mode %d", __func__, flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the three fields carried usable data; AE_MODE is simply
        // omitted from this partial result.
        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                __func__, redeye, flashMode, aeMode);
    }

    // Ownership of the underlying buffer transfers to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
5205
5206 /*===========================================================================
5207 * FUNCTION : dumpMetadataToFile
5208 *
5209 * DESCRIPTION: Dumps tuning metadata to file system
5210 *
5211 * PARAMETERS :
5212 * @meta : tuning metadata
5213 * @dumpFrameCount : current dump frame count
5214 * @enabled : Enable mask
5215 *
5216 *==========================================================================*/
dumpMetadataToFile(tuning_params_t & meta,uint32_t & dumpFrameCount,bool enabled,const char * type,uint32_t frameNumber)5217 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
5218 uint32_t &dumpFrameCount,
5219 bool enabled,
5220 const char *type,
5221 uint32_t frameNumber)
5222 {
5223 uint32_t frm_num = 0;
5224
5225 //Some sanity checks
5226 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
5227 ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
5228 __func__,
5229 meta.tuning_sensor_data_size,
5230 TUNING_SENSOR_DATA_MAX);
5231 return;
5232 }
5233
5234 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
5235 ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
5236 __func__,
5237 meta.tuning_vfe_data_size,
5238 TUNING_VFE_DATA_MAX);
5239 return;
5240 }
5241
5242 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
5243 ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
5244 __func__,
5245 meta.tuning_cpp_data_size,
5246 TUNING_CPP_DATA_MAX);
5247 return;
5248 }
5249
5250 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
5251 ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
5252 __func__,
5253 meta.tuning_cac_data_size,
5254 TUNING_CAC_DATA_MAX);
5255 return;
5256 }
5257 //
5258
5259 if(enabled){
5260 char timeBuf[FILENAME_MAX];
5261 char buf[FILENAME_MAX];
5262 memset(buf, 0, sizeof(buf));
5263 memset(timeBuf, 0, sizeof(timeBuf));
5264 time_t current_time;
5265 struct tm * timeinfo;
5266 time (¤t_time);
5267 timeinfo = localtime (¤t_time);
5268 if (timeinfo != NULL) {
5269 strftime (timeBuf, sizeof(timeBuf),
5270 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
5271 }
5272 String8 filePath(timeBuf);
5273 snprintf(buf,
5274 sizeof(buf),
5275 "%dm_%s_%d.bin",
5276 dumpFrameCount,
5277 type,
5278 frameNumber);
5279 filePath.append(buf);
5280 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
5281 if (file_fd >= 0) {
5282 ssize_t written_len = 0;
5283 meta.tuning_data_version = TUNING_DATA_VERSION;
5284 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
5285 written_len += write(file_fd, data, sizeof(uint32_t));
5286 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
5287 CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
5288 written_len += write(file_fd, data, sizeof(uint32_t));
5289 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
5290 CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
5291 written_len += write(file_fd, data, sizeof(uint32_t));
5292 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
5293 CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
5294 written_len += write(file_fd, data, sizeof(uint32_t));
5295 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
5296 CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
5297 written_len += write(file_fd, data, sizeof(uint32_t));
5298 meta.tuning_mod3_data_size = 0;
5299 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
5300 CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
5301 written_len += write(file_fd, data, sizeof(uint32_t));
5302 size_t total_size = meta.tuning_sensor_data_size;
5303 data = (void *)((uint8_t *)&meta.data);
5304 written_len += write(file_fd, data, total_size);
5305 total_size = meta.tuning_vfe_data_size;
5306 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
5307 written_len += write(file_fd, data, total_size);
5308 total_size = meta.tuning_cpp_data_size;
5309 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
5310 written_len += write(file_fd, data, total_size);
5311 total_size = meta.tuning_cac_data_size;
5312 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
5313 written_len += write(file_fd, data, total_size);
5314 close(file_fd);
5315 }else {
5316 ALOGE("%s: fail to open file for metadata dumping", __func__);
5317 }
5318 }
5319 }
5320
5321 /*===========================================================================
5322 * FUNCTION : cleanAndSortStreamInfo
5323 *
5324 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
5325 * and sort them such that raw stream is at the end of the list
5326 * This is a workaround for camera daemon constraint.
5327 *
5328 * PARAMETERS : None
5329 *
5330 *==========================================================================*/
void QCamera3HardwareInterface::cleanAndSortStreamInfo()
{
    // Rebuilds mStreamInfo in three passes:
    //   1. delete INVALID streams (their channel and bookkeeping struct),
    //   2. move all non-raw streams into a new list, in original order,
    //   3. append the remaining (raw) streams at the end.
    // Raw-streams-last ordering is a workaround for a camera daemon
    // constraint (see function header). Each pass uses the
    // erase-returns-next-iterator idiom; do not convert to range-for.
    List<stream_info_t *> newStreamInfo;

    /*clean up invalid streams*/
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            // The channel is owned through stream->priv; free both the
            // channel object and the malloc'd stream_info entry.
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            delete channel;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }

    // Move preview/video/callback/snapshot streams into newList
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
            newStreamInfo.push_back(*it);
            it = mStreamInfo.erase(it);
        } else
            it++;
    }
    // Move raw streams into newList
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end();) {
        newStreamInfo.push_back(*it);
        it = mStreamInfo.erase(it);
    }

    mStreamInfo = newStreamInfo;
}
5368
5369 /*===========================================================================
5370 * FUNCTION : extractJpegMetadata
5371 *
5372 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
5373 * JPEG metadata is cached in HAL, and return as part of capture
5374 * result when metadata is returned from camera daemon.
5375 *
5376 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
5377 * @request: capture request
5378 *
5379 *==========================================================================*/
extractJpegMetadata(CameraMetadata & jpegMetadata,const camera3_capture_request_t * request)5380 void QCamera3HardwareInterface::extractJpegMetadata(
5381 CameraMetadata& jpegMetadata,
5382 const camera3_capture_request_t *request)
5383 {
5384 CameraMetadata frame_settings;
5385 frame_settings = request->settings;
5386
5387 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
5388 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
5389 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
5390 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
5391
5392 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
5393 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
5394 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
5395 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
5396
5397 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
5398 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
5399 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
5400 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
5401
5402 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
5403 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
5404 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
5405 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
5406
5407 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
5408 jpegMetadata.update(ANDROID_JPEG_QUALITY,
5409 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
5410 frame_settings.find(ANDROID_JPEG_QUALITY).count);
5411
5412 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
5413 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
5414 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
5415 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
5416
5417 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
5418 int32_t thumbnail_size[2];
5419 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
5420 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
5421 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
5422 int32_t orientation =
5423 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
5424 if ((orientation == 90) || (orientation == 270)) {
5425 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
5426 int32_t temp;
5427 temp = thumbnail_size[0];
5428 thumbnail_size[0] = thumbnail_size[1];
5429 thumbnail_size[1] = temp;
5430 }
5431 }
5432 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
5433 thumbnail_size,
5434 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
5435 }
5436
5437 }
5438
5439 /*===========================================================================
5440 * FUNCTION : convertToRegions
5441 *
5442 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
5443 *
5444 * PARAMETERS :
5445 * @rect : cam_rect_t struct to convert
5446 * @region : int32_t destination array
5447 * @weight : if we are converting from cam_area_t, weight is valid
5448 * else weight = -1
5449 *
5450 *==========================================================================*/
convertToRegions(cam_rect_t rect,int32_t * region,int weight)5451 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
5452 int32_t *region, int weight)
5453 {
5454 region[0] = rect.left;
5455 region[1] = rect.top;
5456 region[2] = rect.left + rect.width;
5457 region[3] = rect.top + rect.height;
5458 if (weight > -1) {
5459 region[4] = weight;
5460 }
5461 }
5462
5463 /*===========================================================================
5464 * FUNCTION : convertFromRegions
5465 *
5466 * DESCRIPTION: helper method to convert from array to cam_rect_t
5467 *
5468 * PARAMETERS :
5469 * @rect : cam_rect_t struct to convert
5470 * @region : int32_t destination array
5471 * @weight : if we are converting from cam_area_t, weight is valid
5472 * else weight = -1
5473 *
5474 *==========================================================================*/
convertFromRegions(cam_area_t & roi,const camera_metadata_t * settings,uint32_t tag)5475 void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
5476 const camera_metadata_t *settings, uint32_t tag)
5477 {
5478 CameraMetadata frame_settings;
5479 frame_settings = settings;
5480 int32_t x_min = frame_settings.find(tag).data.i32[0];
5481 int32_t y_min = frame_settings.find(tag).data.i32[1];
5482 int32_t x_max = frame_settings.find(tag).data.i32[2];
5483 int32_t y_max = frame_settings.find(tag).data.i32[3];
5484 roi.weight = frame_settings.find(tag).data.i32[4];
5485 roi.rect.left = x_min;
5486 roi.rect.top = y_min;
5487 roi.rect.width = x_max - x_min;
5488 roi.rect.height = y_max - y_min;
5489 }
5490
5491 /*===========================================================================
5492 * FUNCTION : resetIfNeededROI
5493 *
5494 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
5495 * crop region
5496 *
5497 * PARAMETERS :
5498 * @roi : cam_area_t struct to resize
5499 * @scalerCropRegion : cam_crop_region_t region to compare against
5500 *
5501 *
5502 *==========================================================================*/
resetIfNeededROI(cam_area_t * roi,const cam_crop_region_t * scalerCropRegion)5503 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
5504 const cam_crop_region_t* scalerCropRegion)
5505 {
5506 int32_t roi_x_max = roi->rect.width + roi->rect.left;
5507 int32_t roi_y_max = roi->rect.height + roi->rect.top;
5508 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
5509 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
5510
5511 /* According to spec weight = 0 is used to indicate roi needs to be disabled
5512 * without having this check the calculations below to validate if the roi
5513 * is inside scalar crop region will fail resulting in the roi not being
5514 * reset causing algorithm to continue to use stale roi window
5515 */
5516 if (roi->weight == 0) {
5517 return true;
5518 }
5519
5520 if ((roi_x_max < scalerCropRegion->left) ||
5521 // right edge of roi window is left of scalar crop's left edge
5522 (roi_y_max < scalerCropRegion->top) ||
5523 // bottom edge of roi window is above scalar crop's top edge
5524 (roi->rect.left > crop_x_max) ||
5525 // left edge of roi window is beyond(right) of scalar crop's right edge
5526 (roi->rect.top > crop_y_max)){
5527 // top edge of roi windo is above scalar crop's top edge
5528 return false;
5529 }
5530 if (roi->rect.left < scalerCropRegion->left) {
5531 roi->rect.left = scalerCropRegion->left;
5532 }
5533 if (roi->rect.top < scalerCropRegion->top) {
5534 roi->rect.top = scalerCropRegion->top;
5535 }
5536 if (roi_x_max > crop_x_max) {
5537 roi_x_max = crop_x_max;
5538 }
5539 if (roi_y_max > crop_y_max) {
5540 roi_y_max = crop_y_max;
5541 }
5542 roi->rect.width = roi_x_max - roi->rect.left;
5543 roi->rect.height = roi_y_max - roi->rect.top;
5544 return true;
5545 }
5546
5547 /*===========================================================================
5548 * FUNCTION : convertLandmarks
5549 *
5550 * DESCRIPTION: helper method to extract the landmarks from face detection info
5551 *
5552 * PARAMETERS :
5553 * @face : cam_rect_t struct to convert
5554 * @landmarks : int32_t destination array
5555 *
5556 *
5557 *==========================================================================*/
convertLandmarks(cam_face_detection_info_t face,int32_t * landmarks)5558 void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t *landmarks)
5559 {
5560 landmarks[0] = (int32_t)face.left_eye_center.x;
5561 landmarks[1] = (int32_t)face.left_eye_center.y;
5562 landmarks[2] = (int32_t)face.right_eye_center.x;
5563 landmarks[3] = (int32_t)face.right_eye_center.y;
5564 landmarks[4] = (int32_t)face.mouth_center.x;
5565 landmarks[5] = (int32_t)face.mouth_center.y;
5566 }
5567
5568 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
5569 /*===========================================================================
5570 * FUNCTION : initCapabilities
5571 *
5572 * DESCRIPTION: initialize camera capabilities in static data struct
5573 *
5574 * PARAMETERS :
5575 * @cameraId : camera Id
5576 *
5577 * RETURN : int32_t type of status
5578 * NO_ERROR -- success
5579 * none-zero failure code
5580 *==========================================================================*/
int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
{
    // Opens the camera, maps a shared capability buffer, queries the
    // daemon's capabilities into it and copies the result into the
    // process-global gCamCapability[cameraId]. Cleanup is done through a
    // reverse-order goto chain: each failure label undoes exactly the
    // steps that succeeded before it, so the label order must mirror the
    // setup order. The camera handle is always closed before returning.
    int rc = 0;
    mm_camera_vtbl_t *cameraHandle = NULL;
    QCamera3HeapMemory *capabilityHeap = NULL;

    rc = camera_open((uint8_t)cameraId, &cameraHandle);
    if (rc || !cameraHandle) {
        ALOGE("%s: camera_open failed. rc = %d, cameraHandle = %p", __func__, rc, cameraHandle);
        goto open_failed;
    }

    capabilityHeap = new QCamera3HeapMemory(1);
    if (capabilityHeap == NULL) {
        ALOGE("%s: creation of capabilityHeap failed", __func__);
        goto heap_creation_failed;
    }
    /* Allocate memory for capability buffer */
    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
    if(rc != OK) {
        ALOGE("%s: No memory for cappability", __func__);
        goto allocate_failed;
    }

    /* Map memory for capability buffer */
    // Zero the buffer first so the daemon-side query starts from a clean
    // struct; the fd of the heap buffer is shared with the daemon.
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
                                capabilityHeap->getFd(0),
                                sizeof(cam_capability_t));
    if(rc < 0) {
        ALOGE("%s: failed to map capability buffer", __func__);
        goto map_failed;
    }

    /* Query Capability */
    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
    if(rc < 0) {
        ALOGE("%s: failed to query capability",__func__);
        goto query_failed;
    }
    // Persist a heap copy; gCamCapability outlives this temporary mapping.
    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (!gCamCapability[cameraId]) {
        ALOGE("%s: out of memory", __func__);
        goto query_failed;
    }
    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
                                        sizeof(cam_capability_t));
    rc = 0;

    // Fallthrough cleanup chain: on success rc == 0, on failure rc carries
    // the error; the labels below run in reverse order of acquisition.
query_failed:
    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;
heap_creation_failed:
    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
    cameraHandle = NULL;
open_failed:
    return rc;
}
5644
5645 /*==========================================================================
5646 * FUNCTION : get3Aversion
5647 *
5648 * DESCRIPTION: get the Q3A S/W version
5649 *
5650 * PARAMETERS :
5651 * @sw_version: Reference of Q3A structure which will hold version info upon
5652 * return
5653 *
5654 * RETURN : None
5655 *
5656 *==========================================================================*/
get3AVersion(cam_q3a_version_t & sw_version)5657 void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
5658 {
5659 if(gCamCapability[mCameraId])
5660 sw_version = gCamCapability[mCameraId]->q3a_version;
5661 else
5662 ALOGE("%s:Capability structure NULL!", __func__);
5663 }
5664
5665
5666 /*===========================================================================
5667 * FUNCTION : initParameters
5668 *
5669 * DESCRIPTION: initialize camera parameters
5670 *
5671 * PARAMETERS :
5672 *
5673 * RETURN : int32_t type of status
5674 * NO_ERROR -- success
5675 * none-zero failure code
5676 *==========================================================================*/
initParameters()5677 int QCamera3HardwareInterface::initParameters()
5678 {
5679 int rc = 0;
5680
5681 //Allocate Set Param Buffer
5682 mParamHeap = new QCamera3HeapMemory(1);
5683 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
5684 if(rc != OK) {
5685 rc = NO_MEMORY;
5686 ALOGE("Failed to allocate SETPARM Heap memory");
5687 delete mParamHeap;
5688 mParamHeap = NULL;
5689 return rc;
5690 }
5691
5692 //Map memory for parameters buffer
5693 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
5694 CAM_MAPPING_BUF_TYPE_PARM_BUF,
5695 mParamHeap->getFd(0),
5696 sizeof(metadata_buffer_t));
5697 if(rc < 0) {
5698 ALOGE("%s:failed to map SETPARM buffer",__func__);
5699 rc = FAILED_TRANSACTION;
5700 mParamHeap->deallocate();
5701 delete mParamHeap;
5702 mParamHeap = NULL;
5703 return rc;
5704 }
5705
5706 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
5707
5708 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
5709 return rc;
5710 }
5711
5712 /*===========================================================================
5713 * FUNCTION : deinitParameters
5714 *
5715 * DESCRIPTION: de-initialize camera parameters
5716 *
5717 * PARAMETERS :
5718 *
5719 * RETURN : NONE
5720 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Releases everything acquired by initParameters(), in reverse order:
    // the daemon-side mapping must be torn down before the backing heap is
    // deallocated, since the daemon holds the buffer's fd until unmap.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into the heap just freed; never free() it.
    mParameters = NULL;

    free(mPrevParameters);
    mPrevParameters = NULL;
}
5735
5736 /*===========================================================================
5737 * FUNCTION : calcMaxJpegSize
5738 *
5739 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
5740 *
5741 * PARAMETERS :
5742 *
5743 * RETURN : max_jpeg_size
5744 *==========================================================================*/
calcMaxJpegSize(uint32_t camera_id)5745 size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
5746 {
5747 size_t max_jpeg_size = 0;
5748 size_t temp_width, temp_height;
5749 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
5750 MAX_SIZES_CNT);
5751 for (size_t i = 0; i < count; i++) {
5752 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
5753 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
5754 if (temp_width * temp_height > max_jpeg_size ) {
5755 max_jpeg_size = temp_width * temp_height;
5756 }
5757 }
5758 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
5759 return max_jpeg_size;
5760 }
5761
5762 /*===========================================================================
5763 * FUNCTION : getMaxRawSize
5764 *
5765 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
5766 *
5767 * PARAMETERS :
5768 *
5769 * RETURN : Largest supported Raw Dimension
5770 *==========================================================================*/
getMaxRawSize(uint32_t camera_id)5771 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
5772 {
5773 int max_width = 0;
5774 cam_dimension_t maxRawSize;
5775
5776 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
5777 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
5778 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
5779 max_width = gCamCapability[camera_id]->raw_dim[i].width;
5780 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
5781 }
5782 }
5783 return maxRawSize;
5784 }
5785
5786
5787 /*===========================================================================
5788 * FUNCTION : calcMaxJpegDim
5789 *
5790 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
5791 *
5792 * PARAMETERS :
5793 *
5794 * RETURN : max_jpeg_dim
5795 *==========================================================================*/
calcMaxJpegDim()5796 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
5797 {
5798 cam_dimension_t max_jpeg_dim;
5799 cam_dimension_t curr_jpeg_dim;
5800 max_jpeg_dim.width = 0;
5801 max_jpeg_dim.height = 0;
5802 curr_jpeg_dim.width = 0;
5803 curr_jpeg_dim.height = 0;
5804 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
5805 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
5806 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
5807 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
5808 max_jpeg_dim.width * max_jpeg_dim.height ) {
5809 max_jpeg_dim.width = curr_jpeg_dim.width;
5810 max_jpeg_dim.height = curr_jpeg_dim.height;
5811 }
5812 }
5813 return max_jpeg_dim;
5814 }
5815
5816 /*===========================================================================
5817 * FUNCTION : addStreamConfig
5818 *
5819 * DESCRIPTION: adds the stream configuration to the array
5820 *
5821 * PARAMETERS :
5822 * @available_stream_configs : pointer to stream configuration array
5823 * @scalar_format : scalar format
5824 * @dim : configuration dimension
5825 * @config_type : input or output configuration type
5826 *
5827 * RETURN : NONE
5828 *==========================================================================*/
addStreamConfig(Vector<int32_t> & available_stream_configs,int32_t scalar_format,const cam_dimension_t & dim,int32_t config_type)5829 void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
5830 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
5831 {
5832 available_stream_configs.add(scalar_format);
5833 available_stream_configs.add(dim.width);
5834 available_stream_configs.add(dim.height);
5835 available_stream_configs.add(config_type);
5836 }
5837
5838
5839 /*===========================================================================
5840 * FUNCTION : initStaticMetadata
5841 *
5842 * DESCRIPTION: initialize the static metadata
5843 *
5844 * PARAMETERS :
5845 * @cameraId : camera Id
5846 *
5847 * RETURN : int32_t type of status
5848 * 0 -- success
5849 * non-zero failure code
5850 *==========================================================================*/
initStaticMetadata(uint32_t cameraId)5851 int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
5852 {
5853 int rc = 0;
5854 CameraMetadata staticInfo;
5855 size_t count = 0;
5856 bool limitedDevice = false;
5857 char prop[PROPERTY_VALUE_MAX];
5858
5859 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
5860 * guaranteed, its advertised as limited device */
5861 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
5862 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type);
5863
5864 uint8_t supportedHwLvl = limitedDevice ?
5865 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
5866 // No capability check done here to distinguish LEVEL_FULL from
5867 // LEVEL_3 - assuming this HAL will not run on devices that only
5868 // meet FULL spec
5869 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
5870
5871 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
5872 &supportedHwLvl, 1);
5873
5874 bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
5875 /*HAL 3 only*/
5876 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
5877 &gCamCapability[cameraId]->min_focus_distance, 1);
5878
5879 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
5880 &gCamCapability[cameraId]->hyper_focal_distance, 1);
5881
5882 /*should be using focal lengths but sensor doesn't provide that info now*/
5883 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
5884 &gCamCapability[cameraId]->focal_length,
5885 1);
5886
5887 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
5888 gCamCapability[cameraId]->apertures,
5889 gCamCapability[cameraId]->apertures_count);
5890
5891 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
5892 gCamCapability[cameraId]->filter_densities,
5893 gCamCapability[cameraId]->filter_densities_count);
5894
5895
5896 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
5897 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
5898 gCamCapability[cameraId]->optical_stab_modes_count);
5899
5900 int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
5901 gCamCapability[cameraId]->lens_shading_map_size.height};
5902 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
5903 lens_shading_map_size,
5904 sizeof(lens_shading_map_size)/sizeof(int32_t));
5905
5906 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
5907 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
5908
5909 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
5910 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
5911
5912 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
5913 &gCamCapability[cameraId]->max_frame_duration, 1);
5914
5915 camera_metadata_rational baseGainFactor = {
5916 gCamCapability[cameraId]->base_gain_factor.numerator,
5917 gCamCapability[cameraId]->base_gain_factor.denominator};
5918 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
5919 &baseGainFactor, 1);
5920
5921 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
5922 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
5923
5924 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
5925 gCamCapability[cameraId]->pixel_array_size.height};
5926 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
5927 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
5928
5929 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
5930 gCamCapability[cameraId]->active_array_size.top,
5931 gCamCapability[cameraId]->active_array_size.width,
5932 gCamCapability[cameraId]->active_array_size.height};
5933 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
5934 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
5935
5936 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
5937 &gCamCapability[cameraId]->white_level, 1);
5938
5939 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
5940 gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
5941
5942 bool hasBlackRegions = false;
5943 if (gCamCapability[cameraId]->optical_black_region_count != 0 &&
5944 gCamCapability[cameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
5945 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
5946 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i+=4) {
5947 // Left
5948 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
5949 //Top
5950 opticalBlackRegions[i + 1] = gCamCapability[cameraId]->optical_black_regions[i + 1];
5951 // Width
5952 opticalBlackRegions[i + 2] = gCamCapability[cameraId]->optical_black_regions[i + 2] -
5953 gCamCapability[cameraId]->optical_black_regions[i];
5954 // Height
5955 opticalBlackRegions[i + 3] = gCamCapability[cameraId]->optical_black_regions[i + 3] -
5956 gCamCapability[cameraId]->optical_black_regions[i + 1];
5957 }
5958 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
5959 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
5960 hasBlackRegions = true;
5961 }
5962
5963 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
5964 &gCamCapability[cameraId]->flash_charge_duration, 1);
5965
5966 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
5967 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
5968
5969 uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
5970 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
5971 ×tampSource, 1);
5972
5973 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
5974 &gCamCapability[cameraId]->histogram_size, 1);
5975
5976 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
5977 &gCamCapability[cameraId]->max_histogram_count, 1);
5978
5979 int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
5980 gCamCapability[cameraId]->sharpness_map_size.height};
5981
5982 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
5983 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
5984
5985 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
5986 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
5987
5988 int32_t scalar_formats[] = {
5989 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
5990 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
5991 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
5992 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
5993 HAL_PIXEL_FORMAT_RAW10,
5994 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
5995 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
5996 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
5997 scalar_formats,
5998 scalar_formats_count);
5999
6000 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
6001 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6002 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
6003 count, MAX_SIZES_CNT, available_processed_sizes);
6004 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
6005 available_processed_sizes, count * 2);
6006
6007 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
6008 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
6009 makeTable(gCamCapability[cameraId]->raw_dim,
6010 count, MAX_SIZES_CNT, available_raw_sizes);
6011 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
6012 available_raw_sizes, count * 2);
6013
6014 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
6015 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
6016 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
6017 count, MAX_SIZES_CNT, available_fps_ranges);
6018 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6019 available_fps_ranges, count * 2);
6020
6021 camera_metadata_rational exposureCompensationStep = {
6022 gCamCapability[cameraId]->exp_compensation_step.numerator,
6023 gCamCapability[cameraId]->exp_compensation_step.denominator};
6024 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
6025 &exposureCompensationStep, 1);
6026
6027 Vector<uint8_t> availableVstabModes;
6028 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
6029 char eis_prop[PROPERTY_VALUE_MAX];
6030 memset(eis_prop, 0, sizeof(eis_prop));
6031 property_get("persist.camera.eis.enable", eis_prop, "0");
6032 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
6033 if (facingBack && eis_prop_set) {
6034 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
6035 }
6036 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6037 availableVstabModes.array(), availableVstabModes.size());
6038
6039 /*HAL 1 and HAL 3 common*/
6040 float maxZoom = 4;
6041 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6042 &maxZoom, 1);
6043
6044 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
6045 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
6046
6047 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
6048 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
6049 max3aRegions[2] = 0; /* AF not supported */
6050 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
6051 max3aRegions, 3);
6052
6053 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
6054 memset(prop, 0, sizeof(prop));
6055 property_get("persist.camera.facedetect", prop, "1");
6056 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
6057 CDBG("%s: Support face detection mode: %d",
6058 __func__, supportedFaceDetectMode);
6059
6060 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
6061 Vector<uint8_t> availableFaceDetectModes;
6062 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
6063 if (supportedFaceDetectMode == 1) {
6064 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6065 } else if (supportedFaceDetectMode == 2) {
6066 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6067 } else if (supportedFaceDetectMode == 3) {
6068 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6069 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6070 } else {
6071 maxFaces = 0;
6072 }
6073 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6074 availableFaceDetectModes.array(),
6075 availableFaceDetectModes.size());
6076 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
6077 (int32_t *)&maxFaces, 1);
6078
6079 int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
6080 gCamCapability[cameraId]->exposure_compensation_max};
6081 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
6082 exposureCompensationRange,
6083 sizeof(exposureCompensationRange)/sizeof(int32_t));
6084
6085 uint8_t lensFacing = (facingBack) ?
6086 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
6087 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
6088
6089 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6090 available_thumbnail_sizes,
6091 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
6092
6093 /*all sizes will be clubbed into this tag*/
6094 int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
6095 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6096 size_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes,
6097 count * 2, MAX_SIZES_CNT * 2, gCamCapability[cameraId]->active_array_size,
6098 gCamCapability[cameraId]->max_downscale_factor);
6099 /*android.scaler.availableStreamConfigurations*/
6100 size_t max_stream_configs_size = count * scalar_formats_count * 4;
6101 Vector<int32_t> available_stream_configs;
6102 cam_dimension_t active_array_dim;
6103 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
6104 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
6105 /* Add input/output stream configurations for each scalar formats*/
6106 for (size_t j = 0; j < scalar_formats_count; j++) {
6107 switch (scalar_formats[j]) {
6108 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
6109 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6110 case HAL_PIXEL_FORMAT_RAW10:
6111 for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
6112 addStreamConfig(available_stream_configs, scalar_formats[j],
6113 gCamCapability[cameraId]->raw_dim[i],
6114 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6115 }
6116 break;
6117 case HAL_PIXEL_FORMAT_BLOB:
6118 cam_dimension_t jpeg_size;
6119 for (size_t i = 0; i < jpeg_sizes_cnt/2; i++) {
6120 jpeg_size.width = available_jpeg_sizes[i*2];
6121 jpeg_size.height = available_jpeg_sizes[i*2+1];
6122 addStreamConfig(available_stream_configs, scalar_formats[j],
6123 jpeg_size,
6124 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6125 }
6126 break;
6127 case HAL_PIXEL_FORMAT_YCbCr_420_888:
6128 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
6129 default:
6130 cam_dimension_t largest_picture_size;
6131 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
6132 for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
6133 addStreamConfig(available_stream_configs, scalar_formats[j],
6134 gCamCapability[cameraId]->picture_sizes_tbl[i],
6135 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6136 /* Book keep largest */
6137 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
6138 >= largest_picture_size.width &&
6139 gCamCapability[cameraId]->picture_sizes_tbl[i].height
6140 >= largest_picture_size.height)
6141 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
6142 }
6143 /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
6144 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
6145 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
6146 addStreamConfig(available_stream_configs, scalar_formats[j],
6147 largest_picture_size,
6148 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
6149 }
6150 break;
6151 }
6152 }
6153
6154 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
6155 available_stream_configs.array(), available_stream_configs.size());
6156 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
6157 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
6158
6159 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6160 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6161
6162 /* android.scaler.availableMinFrameDurations */
6163 int64_t available_min_durations[max_stream_configs_size];
6164 size_t idx = 0;
6165 for (size_t j = 0; j < scalar_formats_count; j++) {
6166 switch (scalar_formats[j]) {
6167 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
6168 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6169 case HAL_PIXEL_FORMAT_RAW10:
6170 for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
6171 available_min_durations[idx] = scalar_formats[j];
6172 available_min_durations[idx+1] =
6173 gCamCapability[cameraId]->raw_dim[i].width;
6174 available_min_durations[idx+2] =
6175 gCamCapability[cameraId]->raw_dim[i].height;
6176 available_min_durations[idx+3] =
6177 gCamCapability[cameraId]->raw_min_duration[i];
6178 idx+=4;
6179 }
6180 break;
6181 default:
6182 for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
6183 available_min_durations[idx] = scalar_formats[j];
6184 available_min_durations[idx+1] =
6185 gCamCapability[cameraId]->picture_sizes_tbl[i].width;
6186 available_min_durations[idx+2] =
6187 gCamCapability[cameraId]->picture_sizes_tbl[i].height;
6188 available_min_durations[idx+3] =
6189 gCamCapability[cameraId]->picture_min_duration[i];
6190 idx+=4;
6191 }
6192 break;
6193 }
6194 }
6195 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
6196 &available_min_durations[0], idx);
6197
6198 Vector<int32_t> available_hfr_configs;
6199 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
6200 int32_t fps = 0;
6201 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
6202 case CAM_HFR_MODE_60FPS:
6203 fps = 60;
6204 break;
6205 case CAM_HFR_MODE_90FPS:
6206 fps = 90;
6207 break;
6208 case CAM_HFR_MODE_120FPS:
6209 fps = 120;
6210 break;
6211 case CAM_HFR_MODE_150FPS:
6212 fps = 150;
6213 break;
6214 case CAM_HFR_MODE_180FPS:
6215 fps = 180;
6216 break;
6217 case CAM_HFR_MODE_210FPS:
6218 fps = 210;
6219 break;
6220 case CAM_HFR_MODE_240FPS:
6221 fps = 240;
6222 break;
6223 case CAM_HFR_MODE_480FPS:
6224 fps = 480;
6225 break;
6226 case CAM_HFR_MODE_OFF:
6227 case CAM_HFR_MODE_MAX:
6228 default:
6229 break;
6230 }
6231
6232 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
6233 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
6234 /* For each HFR frame rate, need to advertise one variable fps range
6235 * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
6236 * [120, 120]. While camcorder preview alone is running [30, 120] is
6237 * set by the app. When video recording is started, [120, 120] is
6238 * set. This way sensor configuration does not change when recording
6239 * is started */
6240
6241 /* (width, height, fps_min, fps_max, batch_size_max) */
6242 available_hfr_configs.add(
6243 gCamCapability[cameraId]->hfr_tbl[i].dim.width);
6244 available_hfr_configs.add(
6245 gCamCapability[cameraId]->hfr_tbl[i].dim.height);
6246 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
6247 available_hfr_configs.add(fps);
6248 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6249
6250 /* (width, height, fps_min, fps_max, batch_size_max) */
6251 available_hfr_configs.add(
6252 gCamCapability[cameraId]->hfr_tbl[i].dim.width);
6253 available_hfr_configs.add(
6254 gCamCapability[cameraId]->hfr_tbl[i].dim.height);
6255 available_hfr_configs.add(fps);
6256 available_hfr_configs.add(fps);
6257 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6258 }
6259 }
6260 //Advertise HFR capability only if the property is set
6261 memset(prop, 0, sizeof(prop));
6262 property_get("persist.camera.hal3hfr.enable", prop, "1");
6263 uint8_t hfrEnable = (uint8_t)atoi(prop);
6264
6265 if(hfrEnable && available_hfr_configs.array()) {
6266 staticInfo.update(
6267 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
6268 available_hfr_configs.array(), available_hfr_configs.size());
6269 }
6270
6271 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
6272 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
6273 &max_jpeg_size, 1);
6274
6275 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
6276 size_t size = 0;
6277 count = CAM_EFFECT_MODE_MAX;
6278 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
6279 for (size_t i = 0; i < count; i++) {
6280 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6281 gCamCapability[cameraId]->supported_effects[i]);
6282 if (NAME_NOT_FOUND != val) {
6283 avail_effects[size] = (uint8_t)val;
6284 size++;
6285 }
6286 }
6287 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
6288 avail_effects,
6289 size);
6290
6291 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
6292 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
6293 size_t supported_scene_modes_cnt = 0;
6294 count = CAM_SCENE_MODE_MAX;
6295 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
6296 for (size_t i = 0; i < count; i++) {
6297 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
6298 CAM_SCENE_MODE_OFF) {
6299 int val = lookupFwkName(SCENE_MODES_MAP,
6300 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6301 gCamCapability[cameraId]->supported_scene_modes[i]);
6302 if (NAME_NOT_FOUND != val) {
6303 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
6304 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
6305 supported_scene_modes_cnt++;
6306 }
6307 }
6308 }
6309 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6310 avail_scene_modes,
6311 supported_scene_modes_cnt);
6312
6313 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
6314 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
6315 supported_scene_modes_cnt,
6316 CAM_SCENE_MODE_MAX,
6317 scene_mode_overrides,
6318 supported_indexes,
6319 cameraId);
6320
6321 if (supported_scene_modes_cnt == 0) {
6322 supported_scene_modes_cnt = 1;
6323 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
6324 }
6325
6326 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
6327 scene_mode_overrides, supported_scene_modes_cnt * 3);
6328
6329 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
6330 ANDROID_CONTROL_MODE_AUTO,
6331 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
6332 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
6333 available_control_modes,
6334 3);
6335
6336 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
6337 size = 0;
6338 count = CAM_ANTIBANDING_MODE_MAX;
6339 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
6340 for (size_t i = 0; i < count; i++) {
6341 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
6342 gCamCapability[cameraId]->supported_antibandings[i]);
6343 if (NAME_NOT_FOUND != val) {
6344 avail_antibanding_modes[size] = (uint8_t)val;
6345 size++;
6346 }
6347
6348 }
6349 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
6350 avail_antibanding_modes,
6351 size);
6352
6353 uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
6354 size = 0;
6355 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
6356 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
6357 if (0 == count) {
6358 avail_abberation_modes[0] =
6359 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
6360 size++;
6361 } else {
6362 for (size_t i = 0; i < count; i++) {
6363 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6364 gCamCapability[cameraId]->aberration_modes[i]);
6365 if (NAME_NOT_FOUND != val) {
6366 avail_abberation_modes[size] = (uint8_t)val;
6367 size++;
6368 } else {
6369 ALOGE("%s: Invalid CAC mode %d", __func__,
6370 gCamCapability[cameraId]->aberration_modes[i]);
6371 break;
6372 }
6373 }
6374
6375 }
6376 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6377 avail_abberation_modes,
6378 size);
6379
6380 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
6381 size = 0;
6382 count = CAM_FOCUS_MODE_MAX;
6383 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
6384 for (size_t i = 0; i < count; i++) {
6385 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6386 gCamCapability[cameraId]->supported_focus_modes[i]);
6387 if (NAME_NOT_FOUND != val) {
6388 avail_af_modes[size] = (uint8_t)val;
6389 size++;
6390 }
6391 }
6392 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
6393 avail_af_modes,
6394 size);
6395
6396 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
6397 size = 0;
6398 count = CAM_WB_MODE_MAX;
6399 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
6400 for (size_t i = 0; i < count; i++) {
6401 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6402 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6403 gCamCapability[cameraId]->supported_white_balances[i]);
6404 if (NAME_NOT_FOUND != val) {
6405 avail_awb_modes[size] = (uint8_t)val;
6406 size++;
6407 }
6408 }
6409 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
6410 avail_awb_modes,
6411 size);
6412
6413 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
6414 count = CAM_FLASH_FIRING_LEVEL_MAX;
6415 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
6416 count);
6417 for (size_t i = 0; i < count; i++) {
6418 available_flash_levels[i] =
6419 gCamCapability[cameraId]->supported_firing_levels[i];
6420 }
6421 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
6422 available_flash_levels, count);
6423
6424 uint8_t flashAvailable;
6425 if (gCamCapability[cameraId]->flash_available)
6426 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
6427 else
6428 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
6429 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
6430 &flashAvailable, 1);
6431
6432 Vector<uint8_t> avail_ae_modes;
6433 count = CAM_AE_MODE_MAX;
6434 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
6435 for (size_t i = 0; i < count; i++) {
6436 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
6437 }
6438 if (flashAvailable) {
6439 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
6440 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
6441 }
6442 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
6443 avail_ae_modes.array(),
6444 avail_ae_modes.size());
6445
6446 int32_t sensitivity_range[2];
6447 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
6448 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
6449 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
6450 sensitivity_range,
6451 sizeof(sensitivity_range) / sizeof(int32_t));
6452
6453 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
6454 &gCamCapability[cameraId]->max_analog_sensitivity,
6455 1);
6456
6457 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
6458 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
6459 &sensor_orientation,
6460 1);
6461
6462 int32_t max_output_streams[] = {
6463 MAX_STALLING_STREAMS,
6464 MAX_PROCESSED_STREAMS,
6465 MAX_RAW_STREAMS};
6466 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
6467 max_output_streams,
6468 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
6469
6470 uint8_t avail_leds = 0;
6471 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
6472 &avail_leds, 0);
6473
6474 uint8_t focus_dist_calibrated;
6475 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
6476 gCamCapability[cameraId]->focus_dist_calibrated);
6477 if (NAME_NOT_FOUND != val) {
6478 focus_dist_calibrated = (uint8_t)val;
6479 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
6480 &focus_dist_calibrated, 1);
6481 }
6482
6483 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
6484 size = 0;
6485 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
6486 MAX_TEST_PATTERN_CNT);
6487 for (size_t i = 0; i < count; i++) {
6488 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
6489 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
6490 if (NAME_NOT_FOUND != testpatternMode) {
6491 avail_testpattern_modes[size] = testpatternMode;
6492 size++;
6493 }
6494 }
6495 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
6496 avail_testpattern_modes,
6497 size);
6498
6499 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
6500 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
6501 &max_pipeline_depth,
6502 1);
6503
6504 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
6505 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
6506 &partial_result_count,
6507 1);
6508
6509 int32_t max_stall_duration = MAX_REPROCESS_STALL;
6510 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
6511
6512 Vector<uint8_t> available_capabilities;
6513 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
6514 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
6515 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
6516 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
6517 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
6518 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
6519 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
6520 if (hfrEnable && available_hfr_configs.array()) {
6521 available_capabilities.add(
6522 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
6523 }
6524
6525 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
6526 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
6527 }
6528 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
6529 available_capabilities.array(),
6530 available_capabilities.size());
6531
6532 //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR and/or
6533 //BURST_CAPTURE.
6534 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
6535 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
6536
6537 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
6538 &aeLockAvailable, 1);
6539
6540 //awbLockAvailable to be set to true if capabilities has
6541 //MANUAL_POST_PROCESSING and/or BURST_CAPTURE.
6542 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
6543 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
6544
6545 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
6546 &awbLockAvailable, 1);
6547
6548 int32_t max_input_streams = 1;
6549 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
6550 &max_input_streams,
6551 1);
6552
6553 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
6554 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
6555 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
6556 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
6557 HAL_PIXEL_FORMAT_YCbCr_420_888};
6558 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
6559 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
6560
6561 int32_t max_latency = (limitedDevice) ?
6562 CAM_MAX_SYNC_LATENCY : ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
6563 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
6564 &max_latency,
6565 1);
6566
6567 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
6568 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
6569 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
6570 available_hot_pixel_modes,
6571 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
6572
6573 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
6574 ANDROID_SHADING_MODE_FAST,
6575 ANDROID_SHADING_MODE_HIGH_QUALITY};
6576 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
6577 available_shading_modes,
6578 3);
6579
6580 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
6581 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
6582 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
6583 available_lens_shading_map_modes,
6584 2);
6585
6586 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
6587 ANDROID_EDGE_MODE_FAST,
6588 ANDROID_EDGE_MODE_HIGH_QUALITY,
6589 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
6590 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
6591 available_edge_modes,
6592 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
6593
6594 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
6595 ANDROID_NOISE_REDUCTION_MODE_FAST,
6596 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
6597 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
6598 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
6599 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
6600 available_noise_red_modes,
6601 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
6602
6603 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
6604 ANDROID_TONEMAP_MODE_FAST,
6605 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
6606 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
6607 available_tonemap_modes,
6608 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
6609
6610 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
6611 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
6612 available_hot_pixel_map_modes,
6613 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
6614
6615 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
6616 gCamCapability[cameraId]->reference_illuminant1);
6617 if (NAME_NOT_FOUND != val) {
6618 uint8_t fwkReferenceIlluminant = (uint8_t)val;
6619 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
6620 }
6621
6622 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
6623 gCamCapability[cameraId]->reference_illuminant2);
6624 if (NAME_NOT_FOUND != val) {
6625 uint8_t fwkReferenceIlluminant = (uint8_t)val;
6626 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
6627 }
6628
6629 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
6630 (void *)gCamCapability[cameraId]->forward_matrix1,
6631 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
6632
6633 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
6634 (void *)gCamCapability[cameraId]->forward_matrix2,
6635 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
6636
6637 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
6638 (void *)gCamCapability[cameraId]->color_transform1,
6639 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
6640
6641 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
6642 (void *)gCamCapability[cameraId]->color_transform2,
6643 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
6644
6645 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
6646 (void *)gCamCapability[cameraId]->calibration_transform1,
6647 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
6648
6649 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
6650 (void *)gCamCapability[cameraId]->calibration_transform2,
6651 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
6652
6653 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
6654 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
6655 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
6656 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
6657 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
6658 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6659 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
6660 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
6661 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
6662 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
6663 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
6664 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
6665 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
6666 ANDROID_JPEG_GPS_COORDINATES,
6667 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
6668 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
6669 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
6670 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
6671 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
6672 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
6673 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
6674 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
6675 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
6676 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
6677 ANDROID_STATISTICS_FACE_DETECT_MODE,
6678 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
6679 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
6680 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
6681 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE};
6682
6683 size_t request_keys_cnt =
6684 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
6685 Vector<int32_t> available_request_keys;
6686 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
6687 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
6688 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
6689 }
6690
6691 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
6692 available_request_keys.array(), available_request_keys.size());
6693
6694 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
6695 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
6696 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
6697 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
6698 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
6699 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
6700 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
6701 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
6702 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
6703 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
6704 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
6705 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
6706 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
6707 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
6708 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6709 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
6710 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
6711 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
6712 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
6713 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6714 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
6715 ANDROID_STATISTICS_FACE_SCORES,
6716 ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL,
6717 ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
6718 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST };
6719 size_t result_keys_cnt =
6720 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
6721
6722 Vector<int32_t> available_result_keys;
6723 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
6724 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
6725 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
6726 }
6727 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
6728 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
6729 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
6730 }
6731 if (supportedFaceDetectMode == 1) {
6732 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
6733 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
6734 } else if ((supportedFaceDetectMode == 2) ||
6735 (supportedFaceDetectMode == 3)) {
6736 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
6737 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
6738 }
6739 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
6740 available_result_keys.array(), available_result_keys.size());
6741
6742 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
6743 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6744 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
6745 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
6746 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6747 ANDROID_SCALER_CROPPING_TYPE,
6748 ANDROID_SYNC_MAX_LATENCY,
6749 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6750 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6751 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6752 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
6753 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
6754 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6755 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6756 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6757 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6758 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6759 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6760 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
6761 ANDROID_LENS_FACING,
6762 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
6763 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
6764 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
6765 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
6766 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6767 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
6768 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
6769 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
6770 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
6771 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
6772 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
6773 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
6774 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6775 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6776 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6777 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6778 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
6779 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
6780 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
6781 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6782 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6783 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6784 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6785 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
6786 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
6787 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
6788 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
6789 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
6790 ANDROID_TONEMAP_MAX_CURVE_POINTS,
6791 ANDROID_CONTROL_AVAILABLE_MODES,
6792 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
6793 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
6794 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
6795 ANDROID_SHADING_AVAILABLE_MODES,
6796 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
6797
6798 Vector<int32_t> available_characteristics_keys;
6799 available_characteristics_keys.appendArray(characteristics_keys_basic,
6800 sizeof(characteristics_keys_basic)/sizeof(int32_t));
6801 if (hasBlackRegions) {
6802 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
6803 }
6804 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
6805 available_characteristics_keys.array(),
6806 available_characteristics_keys.size());
6807
6808 /*available stall durations depend on the hw + sw and will be different for different devices */
6809 /*have to add for raw after implementation*/
6810 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
6811 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
6812
6813 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6814 size_t raw_count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt,
6815 MAX_SIZES_CNT);
6816 size_t available_stall_size = count * 4;
6817 int64_t available_stall_durations[available_stall_size];
6818 idx = 0;
6819 for (uint32_t j = 0; j < stall_formats_count; j++) {
6820 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
6821 for (uint32_t i = 0; i < count; i++) {
6822 available_stall_durations[idx] = stall_formats[j];
6823 available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
6824 available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
6825 available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
6826 idx+=4;
6827 }
6828 } else {
6829 for (uint32_t i = 0; i < raw_count; i++) {
6830 available_stall_durations[idx] = stall_formats[j];
6831 available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
6832 available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
6833 available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
6834 idx+=4;
6835 }
6836 }
6837 }
6838 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
6839 available_stall_durations,
6840 idx);
6841 //QCAMERA3_OPAQUE_RAW
6842 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6843 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6844 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
6845 case LEGACY_RAW:
6846 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6847 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
6848 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6849 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6850 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6851 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
6852 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6853 break;
6854 case MIPI_RAW:
6855 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6856 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
6857 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6858 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
6859 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6860 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
6861 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
6862 break;
6863 default:
6864 ALOGE("%s: unknown opaque_raw_format %d", __func__,
6865 gCamCapability[cameraId]->opaque_raw_fmt);
6866 break;
6867 }
6868 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
6869
6870 int32_t strides[3*raw_count];
6871 for (size_t i = 0; i < raw_count; i++) {
6872 cam_stream_buf_plane_info_t buf_planes;
6873 strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
6874 strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
6875 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
6876 &gCamCapability[cameraId]->padding_info, &buf_planes);
6877 strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
6878 }
6879 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
6880 3*raw_count);
6881
6882 gStaticMetadata[cameraId] = staticInfo.release();
6883 return rc;
6884 }
6885
6886 /*===========================================================================
6887 * FUNCTION : makeTable
6888 *
6889 * DESCRIPTION: make a table of sizes
6890 *
6891 * PARAMETERS :
6892 *
6893 *
6894 *==========================================================================*/
makeTable(cam_dimension_t * dimTable,size_t size,size_t max_size,int32_t * sizeTable)6895 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
6896 size_t max_size, int32_t *sizeTable)
6897 {
6898 size_t j = 0;
6899 if (size > max_size) {
6900 size = max_size;
6901 }
6902 for (size_t i = 0; i < size; i++) {
6903 sizeTable[j] = dimTable[i].width;
6904 sizeTable[j+1] = dimTable[i].height;
6905 j+=2;
6906 }
6907 }
6908
6909 /*===========================================================================
6910 * FUNCTION : makeFPSTable
6911 *
6912 * DESCRIPTION: make a table of fps ranges
6913 *
6914 * PARAMETERS :
6915 *
6916 *==========================================================================*/
makeFPSTable(cam_fps_range_t * fpsTable,size_t size,size_t max_size,int32_t * fpsRangesTable)6917 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
6918 size_t max_size, int32_t *fpsRangesTable)
6919 {
6920 size_t j = 0;
6921 if (size > max_size) {
6922 size = max_size;
6923 }
6924 for (size_t i = 0; i < size; i++) {
6925 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
6926 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
6927 j+=2;
6928 }
6929 }
6930
6931 /*===========================================================================
6932 * FUNCTION : makeOverridesList
6933 *
6934 * DESCRIPTION: make a list of scene mode overrides
6935 *
6936 * PARAMETERS :
6937 *
6938 *
6939 *==========================================================================*/
makeOverridesList(cam_scene_mode_overrides_t * overridesTable,size_t size,size_t max_size,uint8_t * overridesList,uint8_t * supported_indexes,uint32_t camera_id)6940 void QCamera3HardwareInterface::makeOverridesList(
6941 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
6942 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
6943 {
6944 /*daemon will give a list of overrides for all scene modes.
6945 However we should send the fwk only the overrides for the scene modes
6946 supported by the framework*/
6947 size_t j = 0;
6948 if (size > max_size) {
6949 size = max_size;
6950 }
6951 size_t focus_count = CAM_FOCUS_MODE_MAX;
6952 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
6953 focus_count);
6954 for (size_t i = 0; i < size; i++) {
6955 bool supt = false;
6956 size_t index = supported_indexes[i];
6957 overridesList[j] = gCamCapability[camera_id]->flash_available ?
6958 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
6959 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6960 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6961 overridesTable[index].awb_mode);
6962 if (NAME_NOT_FOUND != val) {
6963 overridesList[j+1] = (uint8_t)val;
6964 }
6965 uint8_t focus_override = overridesTable[index].af_mode;
6966 for (size_t k = 0; k < focus_count; k++) {
6967 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
6968 supt = true;
6969 break;
6970 }
6971 }
6972 if (supt) {
6973 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6974 focus_override);
6975 if (NAME_NOT_FOUND != val) {
6976 overridesList[j+2] = (uint8_t)val;
6977 }
6978 } else {
6979 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
6980 }
6981 j+=3;
6982 }
6983 }
6984
6985 /*===========================================================================
6986 * FUNCTION : filterJpegSizes
6987 *
6988 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
6989 * could be downscaled to
6990 *
6991 * PARAMETERS :
6992 *
6993 * RETURN : length of jpegSizes array
6994 *==========================================================================*/
6995
filterJpegSizes(int32_t * jpegSizes,int32_t * processedSizes,size_t processedSizesCnt,size_t maxCount,cam_rect_t active_array_size,uint8_t downscale_factor)6996 size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
6997 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
6998 uint8_t downscale_factor)
6999 {
7000 if (0 == downscale_factor) {
7001 downscale_factor = 1;
7002 }
7003
7004 int32_t min_width = active_array_size.width / downscale_factor;
7005 int32_t min_height = active_array_size.height / downscale_factor;
7006 size_t jpegSizesCnt = 0;
7007 if (processedSizesCnt > maxCount) {
7008 processedSizesCnt = maxCount;
7009 }
7010 for (size_t i = 0; i < processedSizesCnt; i+=2) {
7011 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
7012 jpegSizes[jpegSizesCnt] = processedSizes[i];
7013 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
7014 jpegSizesCnt += 2;
7015 }
7016 }
7017 return jpegSizesCnt;
7018 }
7019
7020 /*===========================================================================
7021 * FUNCTION : getPreviewHalPixelFormat
7022 *
7023 * DESCRIPTION: convert the format to type recognized by framework
7024 *
7025 * PARAMETERS : format : the format from backend
7026 *
7027 ** RETURN : format recognized by framework
7028 *
7029 *==========================================================================*/
getScalarFormat(int32_t format)7030 int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
7031 {
7032 int32_t halPixelFormat;
7033
7034 switch (format) {
7035 case CAM_FORMAT_YUV_420_NV12:
7036 halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
7037 break;
7038 case CAM_FORMAT_YUV_420_NV21:
7039 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
7040 break;
7041 case CAM_FORMAT_YUV_420_NV21_ADRENO:
7042 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
7043 break;
7044 case CAM_FORMAT_YUV_420_YV12:
7045 halPixelFormat = HAL_PIXEL_FORMAT_YV12;
7046 break;
7047 case CAM_FORMAT_YUV_422_NV16:
7048 case CAM_FORMAT_YUV_422_NV61:
7049 default:
7050 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
7051 break;
7052 }
7053 return halPixelFormat;
7054 }
7055
7056 /*===========================================================================
7057 * FUNCTION : computeNoiseModelEntryS
7058 *
7059 * DESCRIPTION: function to map a given sensitivity to the S noise
7060 * model parameters in the DNG noise model.
7061 *
7062 * PARAMETERS : sens : the sensor sensitivity
7063 *
7064 ** RETURN : S (sensor amplification) noise
7065 *
7066 *==========================================================================*/
computeNoiseModelEntryS(int32_t sens)7067 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
7068 double s = gCamCapability[mCameraId]->gradient_S * sens +
7069 gCamCapability[mCameraId]->offset_S;
7070 return ((s < 0.0) ? 0.0 : s);
7071 }
7072
7073 /*===========================================================================
7074 * FUNCTION : computeNoiseModelEntryO
7075 *
7076 * DESCRIPTION: function to map a given sensitivity to the O noise
7077 * model parameters in the DNG noise model.
7078 *
7079 * PARAMETERS : sens : the sensor sensitivity
7080 *
7081 ** RETURN : O (sensor readout) noise
7082 *
7083 *==========================================================================*/
computeNoiseModelEntryO(int32_t sens)7084 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
7085 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
7086 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
7087 1.0 : (1.0 * sens / max_analog_sens);
7088 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
7089 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
7090 return ((o < 0.0) ? 0.0 : o);
7091 }
7092
7093 /*===========================================================================
7094 * FUNCTION : getSensorSensitivity
7095 *
7096 * DESCRIPTION: convert iso_mode to an integer value
7097 *
7098 * PARAMETERS : iso_mode : the iso_mode supported by sensor
7099 *
7100 ** RETURN : sensitivity supported by sensor
7101 *
7102 *==========================================================================*/
getSensorSensitivity(int32_t iso_mode)7103 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
7104 {
7105 int32_t sensitivity;
7106
7107 switch (iso_mode) {
7108 case CAM_ISO_MODE_100:
7109 sensitivity = 100;
7110 break;
7111 case CAM_ISO_MODE_200:
7112 sensitivity = 200;
7113 break;
7114 case CAM_ISO_MODE_400:
7115 sensitivity = 400;
7116 break;
7117 case CAM_ISO_MODE_800:
7118 sensitivity = 800;
7119 break;
7120 case CAM_ISO_MODE_1600:
7121 sensitivity = 1600;
7122 break;
7123 default:
7124 sensitivity = -1;
7125 break;
7126 }
7127 return sensitivity;
7128 }
7129
7130 /*===========================================================================
7131 * FUNCTION : getCamInfo
7132 *
7133 * DESCRIPTION: query camera capabilities
7134 *
7135 * PARAMETERS :
7136 * @cameraId : camera Id
7137 * @info : camera info struct to be filled in with camera capabilities
7138 *
7139 * RETURN : int type of status
7140 * NO_ERROR -- success
7141 * none-zero failure code
7142 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    // gCamLock serializes lazy initialization of the per-camera capability
    // and static-metadata caches shared across all HAL instances.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Map the sensor's physical position to the framework facing enum.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // Unknown position: report failure but still fill in the remaining
        // fields below (preserves existing behavior; rc is returned at end).
        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    ALOGI("%s: camera %d resource cost is %d", __func__, cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
7210
7211 /*===========================================================================
7212 * FUNCTION : translateCapabilityToMetadata
7213 *
7214 * DESCRIPTION: translate the capability into camera_metadata_t
7215 *
7216 * PARAMETERS : type of the request
7217 *
7218 *
7219 * RETURN : success: camera_metadata_t*
7220 * failure: NULL
7221 *
7222 *==========================================================================*/
translateCapabilityToMetadata(int type)7223 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
7224 {
7225 if (mDefaultMetadata[type] != NULL) {
7226 return mDefaultMetadata[type];
7227 }
7228 //first time we are handling this request
7229 //fill up the metadata structure using the wrapper class
7230 CameraMetadata settings;
7231 //translate from cam_capability_t to camera_metadata_tag_t
7232 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
7233 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
7234 int32_t defaultRequestID = 0;
7235 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
7236
7237 /* OIS disable */
7238 char ois_prop[PROPERTY_VALUE_MAX];
7239 memset(ois_prop, 0, sizeof(ois_prop));
7240 property_get("persist.camera.ois.disable", ois_prop, "0");
7241 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
7242
7243 /* Force video to use OIS */
7244 char videoOisProp[PROPERTY_VALUE_MAX];
7245 memset(videoOisProp, 0, sizeof(videoOisProp));
7246 property_get("persist.camera.ois.video", videoOisProp, "1");
7247 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
7248
7249 // EIS enable/disable
7250 char eis_prop[PROPERTY_VALUE_MAX];
7251 memset(eis_prop, 0, sizeof(eis_prop));
7252 property_get("persist.camera.eis.enable", eis_prop, "0");
7253 const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
7254
7255 // Hybrid AE enable/disable
7256 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
7257 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
7258 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
7259 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
7260
7261 const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
7262 // This is a bit hacky. EIS is enabled only when the above setprop
7263 // is set to non-zero value and on back camera (for 2015 Nexus).
7264 // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
7265 // configureStream is called before this function. In other words,
7266 // we cannot guarantee the app will call configureStream before
7267 // calling createDefaultRequest.
7268 const bool eisEnabled = facingBack && eis_prop_set;
7269
7270 uint8_t controlIntent = 0;
7271 uint8_t focusMode;
7272 uint8_t vsMode;
7273 uint8_t optStabMode;
7274 uint8_t cacMode;
7275 uint8_t edge_mode;
7276 uint8_t noise_red_mode;
7277 uint8_t tonemap_mode;
7278 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7279 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7280 switch (type) {
7281 case CAMERA3_TEMPLATE_PREVIEW:
7282 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
7283 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7284 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7285 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7286 edge_mode = ANDROID_EDGE_MODE_FAST;
7287 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7288 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7289 break;
7290 case CAMERA3_TEMPLATE_STILL_CAPTURE:
7291 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
7292 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7293 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7294 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
7295 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
7296 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
7297 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
7298 break;
7299 case CAMERA3_TEMPLATE_VIDEO_RECORD:
7300 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
7301 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7302 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7303 if (eisEnabled) {
7304 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7305 }
7306 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7307 edge_mode = ANDROID_EDGE_MODE_FAST;
7308 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7309 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7310 if (forceVideoOis)
7311 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7312 break;
7313 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7314 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
7315 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7316 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7317 if (eisEnabled) {
7318 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7319 }
7320 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7321 edge_mode = ANDROID_EDGE_MODE_FAST;
7322 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7323 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7324 if (forceVideoOis)
7325 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7326 break;
7327 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
7328 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
7329 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7330 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7331 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7332 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
7333 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
7334 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7335 break;
7336 case CAMERA3_TEMPLATE_MANUAL:
7337 edge_mode = ANDROID_EDGE_MODE_FAST;
7338 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7339 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7340 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7341 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
7342 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7343 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7344 break;
7345 default:
7346 edge_mode = ANDROID_EDGE_MODE_FAST;
7347 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7348 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7349 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7350 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
7351 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7352 break;
7353 }
7354 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
7355 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
7356 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
7357 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
7358 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7359 }
7360 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
7361
7362 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7363 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
7364 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7365 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7366 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
7367 || ois_disable)
7368 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7369 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
7370
7371 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7372 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
7373
7374 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
7375 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
7376
7377 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
7378 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
7379
7380 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
7381 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
7382
7383 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
7384 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
7385
7386 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
7387 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
7388
7389 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
7390 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
7391
7392 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
7393 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
7394
7395 /*flash*/
7396 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
7397 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
7398
7399 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
7400 settings.update(ANDROID_FLASH_FIRING_POWER,
7401 &flashFiringLevel, 1);
7402
7403 /* lens */
7404 float default_aperture = gCamCapability[mCameraId]->apertures[0];
7405 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
7406
7407 if (gCamCapability[mCameraId]->filter_densities_count) {
7408 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
7409 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
7410 gCamCapability[mCameraId]->filter_densities_count);
7411 }
7412
7413 float default_focal_length = gCamCapability[mCameraId]->focal_length;
7414 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
7415
7416 float default_focus_distance = 0;
7417 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
7418
7419 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
7420 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
7421
7422 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7423 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7424
7425 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
7426 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
7427
7428 /* face detection (default to OFF) */
7429 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
7430 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
7431
7432 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
7433 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
7434
7435 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
7436 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
7437
7438 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7439 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7440
7441 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7442 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
7443
7444 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7445 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
7446
7447 /* Exposure time(Update the Min Exposure Time)*/
7448 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
7449 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
7450
7451 /* frame duration */
7452 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
7453 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
7454
7455 /* sensitivity */
7456 static const int32_t default_sensitivity = 100;
7457 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
7458
7459 /*edge mode*/
7460 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
7461
7462 /*noise reduction mode*/
7463 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
7464
7465 /*color correction mode*/
7466 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
7467 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
7468
7469 /*transform matrix mode*/
7470 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
7471
7472 int32_t scaler_crop_region[4];
7473 scaler_crop_region[0] = 0;
7474 scaler_crop_region[1] = 0;
7475 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
7476 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
7477 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
7478
7479 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
7480 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
7481
7482 /*focus distance*/
7483 float focus_distance = 0.0;
7484 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
7485
7486 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
7487 float max_range = 0.0;
7488 float max_fixed_fps = 0.0;
7489 int32_t fps_range[2] = {0, 0};
7490 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
7491 i++) {
7492 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
7493 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7494 if (type == CAMERA3_TEMPLATE_PREVIEW ||
7495 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
7496 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
7497 if (range > max_range) {
7498 fps_range[0] =
7499 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7500 fps_range[1] =
7501 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7502 max_range = range;
7503 }
7504 } else {
7505 if (range < 0.01 && max_fixed_fps <
7506 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
7507 fps_range[0] =
7508 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7509 fps_range[1] =
7510 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7511 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7512 }
7513 }
7514 }
7515 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
7516
7517 /*precapture trigger*/
7518 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
7519 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
7520
7521 /*af trigger*/
7522 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
7523 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
7524
7525 /* ae & af regions */
7526 int32_t active_region[] = {
7527 gCamCapability[mCameraId]->active_array_size.left,
7528 gCamCapability[mCameraId]->active_array_size.top,
7529 gCamCapability[mCameraId]->active_array_size.left +
7530 gCamCapability[mCameraId]->active_array_size.width,
7531 gCamCapability[mCameraId]->active_array_size.top +
7532 gCamCapability[mCameraId]->active_array_size.height,
7533 0};
7534 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
7535 sizeof(active_region) / sizeof(active_region[0]));
7536 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
7537 sizeof(active_region) / sizeof(active_region[0]));
7538
7539 /* black level lock */
7540 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7541 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
7542
7543 /* lens shading map mode */
7544 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7545 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
7546 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
7547 }
7548 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
7549
7550 //special defaults for manual template
7551 if (type == CAMERA3_TEMPLATE_MANUAL) {
7552 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
7553 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
7554
7555 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
7556 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
7557
7558 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
7559 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
7560
7561 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
7562 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
7563
7564 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
7565 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
7566
7567 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
7568 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
7569 }
7570
7571
7572 /* TNR
7573 * We'll use this location to determine which modes TNR will be set.
7574 * We will enable TNR to be on if either of the Preview/Video stream requires TNR
7575 * This is not to be confused with linking on a per stream basis that decision
7576 * is still on per-session basis and will be handled as part of config stream
7577 */
7578 uint8_t tnr_enable = 0;
7579
7580 if (m_bTnrPreview || m_bTnrVideo) {
7581
7582 switch (type) {
7583 case CAMERA3_TEMPLATE_VIDEO_RECORD:
7584 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7585 tnr_enable = 1;
7586 break;
7587
7588 default:
7589 tnr_enable = 0;
7590 break;
7591 }
7592
7593 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
7594 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7595 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7596
7597 CDBG("%s: TNR:%d with process plate %d for template:%d",
7598 __func__, tnr_enable, tnr_process_type, type);
7599 }
7600
7601 /* CDS default */
7602 char prop[PROPERTY_VALUE_MAX];
7603 memset(prop, 0, sizeof(prop));
7604 property_get("persist.camera.CDS", prop, "Auto");
7605 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
7606 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
7607 if (CAM_CDS_MODE_MAX == cds_mode) {
7608 cds_mode = CAM_CDS_MODE_AUTO;
7609 }
7610 m_CdsPreference = cds_mode;
7611
7612 /* Disabling CDS in templates which have TNR enabled*/
7613 if (tnr_enable)
7614 cds_mode = CAM_CDS_MODE_OFF;
7615
7616 int32_t mode = cds_mode;
7617 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
7618
7619 /* hybrid ae */
7620 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
7621
7622 mDefaultMetadata[type] = settings.release();
7623
7624 return mDefaultMetadata[type];
7625 }
7626
7627 /*===========================================================================
7628 * FUNCTION : setFrameParameters
7629 *
7630 * DESCRIPTION: set parameters per frame as requested in the metadata from
7631 * framework
7632 *
7633 * PARAMETERS :
7634 * @request : request that needs to be serviced
7635 * @streamID : Stream ID of all the requested streams
7636 * @blob_request: Whether this request is a blob request or not
7637 *
7638 * RETURN : success: NO_ERROR
7639 * failure:
7640 *==========================================================================*/
int QCamera3HardwareInterface::setFrameParameters(
                    camera3_capture_request_t *request,
                    cam_stream_ID_t streamID,
                    int blob_request,
                    uint32_t snapshotStreamId)
{
    /*translate from camera_metadata_t type to parm_type_t*/
    int rc = 0;
    int32_t hal_version = CAM_HAL_V3;

    // Start from a clean parameter batch so stale entries from the previous
    // request cannot leak into this frame. The order of the entries below
    // matters: HAL version and frame number must be set before the rest of
    // the metadata is translated in.
    clear_metadata_buffer(mParameters);
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
        ALOGE("%s: Failed to set hal version in the parameters", __func__);
        return BAD_VALUE;
    }

    /*we need to update the frame number in the parameters*/
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
            request->frame_number)) {
        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
        return BAD_VALUE;
    }

    /* Update stream id of all the requested buffers */
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
        return BAD_VALUE;
    }

    if (mUpdateDebugLevel) {
        uint32_t dummyDebugLevel = 0;
        /* The value of dummyDebugLevel is irrelevant. On
         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
                dummyDebugLevel)) {
            ALOGE("%s: Failed to set UPDATE_DEBUG_LEVEL", __func__);
            return BAD_VALUE;
        }
        // One-shot: the debug-level refresh is only propagated on the first
        // frame after the property changed.
        mUpdateDebugLevel = false;
    }

    // Only requests carrying new settings need translation; repeating
    // requests (settings == NULL) reuse the batch as built above.
    if(request->settings != NULL){
        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
        // For blob (JPEG) requests, cache the translated parameters —
        // presumably consumed later by the snapshot/EXIF path via
        // mPrevParameters; TODO confirm against the consumer.
        if (blob_request)
            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
    }

    return rc;
}
7690
7691 /*===========================================================================
7692 * FUNCTION : setReprocParameters
7693 *
7694 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
7695 * return it.
7696 *
7697 * PARAMETERS :
7698 * @request : request that needs to be serviced
7699 *
7700 * RETURN : success: NO_ERROR
7701 * failure:
7702 *==========================================================================*/
setReprocParameters(camera3_capture_request_t * request,metadata_buffer_t * reprocParam,uint32_t snapshotStreamId)7703 int32_t QCamera3HardwareInterface::setReprocParameters(
7704 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
7705 uint32_t snapshotStreamId)
7706 {
7707 /*translate from camera_metadata_t type to parm_type_t*/
7708 int rc = 0;
7709
7710 if (NULL == request->settings){
7711 ALOGE("%s: Reprocess settings cannot be NULL", __func__);
7712 return BAD_VALUE;
7713 }
7714
7715 if (NULL == reprocParam) {
7716 ALOGE("%s: Invalid reprocessing metadata buffer", __func__);
7717 return BAD_VALUE;
7718 }
7719 clear_metadata_buffer(reprocParam);
7720
7721 /*we need to update the frame number in the parameters*/
7722 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
7723 request->frame_number)) {
7724 ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7725 return BAD_VALUE;
7726 }
7727
7728 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
7729 if (rc < 0) {
7730 ALOGE("%s: Failed to translate reproc request", __func__);
7731 return rc;
7732 }
7733
7734 CameraMetadata frame_settings;
7735 frame_settings = request->settings;
7736 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
7737 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
7738 int32_t *crop_count =
7739 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
7740 int32_t *crop_data =
7741 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
7742 int32_t *roi_map =
7743 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
7744 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
7745 cam_crop_data_t crop_meta;
7746 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
7747 crop_meta.num_of_streams = 1;
7748 crop_meta.crop_info[0].crop.left = crop_data[0];
7749 crop_meta.crop_info[0].crop.top = crop_data[1];
7750 crop_meta.crop_info[0].crop.width = crop_data[2];
7751 crop_meta.crop_info[0].crop.height = crop_data[3];
7752
7753 crop_meta.crop_info[0].roi_map.left =
7754 roi_map[0];
7755 crop_meta.crop_info[0].roi_map.top =
7756 roi_map[1];
7757 crop_meta.crop_info[0].roi_map.width =
7758 roi_map[2];
7759 crop_meta.crop_info[0].roi_map.height =
7760 roi_map[3];
7761
7762 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
7763 rc = BAD_VALUE;
7764 }
7765 CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d",
7766 __func__,
7767 request->input_buffer->stream,
7768 crop_meta.crop_info[0].crop.left,
7769 crop_meta.crop_info[0].crop.top,
7770 crop_meta.crop_info[0].crop.width,
7771 crop_meta.crop_info[0].crop.height);
7772 CDBG("%s: Found reprocess roi map data for stream %p %dx%d, %dx%d",
7773 __func__,
7774 request->input_buffer->stream,
7775 crop_meta.crop_info[0].roi_map.left,
7776 crop_meta.crop_info[0].roi_map.top,
7777 crop_meta.crop_info[0].roi_map.width,
7778 crop_meta.crop_info[0].roi_map.height);
7779 } else {
7780 ALOGE("%s: Invalid reprocess crop count %d!", __func__, *crop_count);
7781 }
7782 } else {
7783 ALOGE("%s: No crop data from matching output stream", __func__);
7784 }
7785
7786 /* These settings are not needed for regular requests so handle them specially for
7787 reprocess requests; information needed for EXIF tags */
7788 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
7789 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
7790 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7791 if (NAME_NOT_FOUND != val) {
7792 uint32_t flashMode = (uint32_t)val;
7793 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
7794 rc = BAD_VALUE;
7795 }
7796 } else {
7797 ALOGE("%s: Could not map fwk flash mode %d to correct hal flash mode", __func__,
7798 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7799 }
7800 } else {
7801 CDBG_HIGH("%s: No flash mode in reprocess settings", __func__);
7802 }
7803
7804 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
7805 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
7806 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
7807 rc = BAD_VALUE;
7808 }
7809 } else {
7810 CDBG_HIGH("%s: No flash state in reprocess settings", __func__);
7811 }
7812
7813 return rc;
7814 }
7815
7816 /*===========================================================================
7817 * FUNCTION : saveRequestSettings
7818 *
7819 * DESCRIPTION: Add any settings that might have changed to the request settings
7820 * and save the settings to be applied on the frame
7821 *
7822 * PARAMETERS :
7823 * @jpegMetadata : the extracted and/or modified jpeg metadata
7824 * @request : request with initial settings
7825 *
7826 * RETURN :
7827 * camera_metadata_t* : pointer to the saved request settings
7828 *==========================================================================*/
saveRequestSettings(const CameraMetadata & jpegMetadata,camera3_capture_request_t * request)7829 camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
7830 const CameraMetadata &jpegMetadata,
7831 camera3_capture_request_t *request)
7832 {
7833 camera_metadata_t *resultMetadata;
7834 CameraMetadata camMetadata;
7835 camMetadata = request->settings;
7836
7837 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7838 int32_t thumbnail_size[2];
7839 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7840 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7841 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
7842 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7843 }
7844
7845 resultMetadata = camMetadata.release();
7846 return resultMetadata;
7847 }
7848
7849 /*===========================================================================
7850 * FUNCTION : setHalFpsRange
7851 *
7852 * DESCRIPTION: set FPS range parameter
7853 *
7854 *
7855 * PARAMETERS :
7856 * @settings : Metadata from framework
7857 * @hal_metadata: Metadata buffer
7858 *
7859 *
7860 * RETURN : success: NO_ERROR
7861 * failure:
7862 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): the tag is read without an exists() check here — the
    // caller (translateToHalMetadata) only invokes this helper after
    // verifying ANDROID_CONTROL_AE_TARGET_FPS_RANGE exists.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Video fps initially mirrors the AE target range; it is tightened below
    // for constrained high-speed (HFR) sessions.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    CDBG("%s: aeTargetFpsRange fps: [%f %f]", __func__,
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *        YES        |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Default: no batching. Recomputed below when in constrained HFR mode.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // In HFR mode the sensor runs at a fixed rate equal to the requested
        // max fps (see table above), so pin min == max for video.
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the requested max fps to the backend HFR mode enum.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch size is the ratio of sensor fps to the preview fps,
                // capped at the hardware maximum.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
            }
            CDBG("%s: hfrMode: %d batchSize: %d", __func__, hfrMode, mBatchSize);

        }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    CDBG("%s: fps: [%f %f] vid_fps: [%f %f]", __func__, fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
7956
7957 /*===========================================================================
7958 * FUNCTION : translateToHalMetadata
7959 *
7960 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
7961 *
7962 *
7963 * PARAMETERS :
7964 * @request : request sent from framework
7965 *
7966 *
7967 * RETURN : success: NO_ERROR
7968 * failure:
7969 *==========================================================================*/
translateToHalMetadata(const camera3_capture_request_t * request,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId)7970 int QCamera3HardwareInterface::translateToHalMetadata
7971 (const camera3_capture_request_t *request,
7972 metadata_buffer_t *hal_metadata,
7973 uint32_t snapshotStreamId)
7974 {
7975 int rc = 0;
7976 CameraMetadata frame_settings;
7977 frame_settings = request->settings;
7978
7979 /* Do not change the order of the following list unless you know what you are
7980 * doing.
7981 * The order is laid out in such a way that parameters in the front of the table
7982 * may be used to override the parameters later in the table. Examples are:
7983 * 1. META_MODE should precede AEC/AWB/AF MODE
7984 * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
7985 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
7986 * 4. Any mode should precede it's corresponding settings
7987 */
7988 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
7989 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
7990 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
7991 rc = BAD_VALUE;
7992 }
7993 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
7994 if (rc != NO_ERROR) {
7995 ALOGE("%s: extractSceneMode failed", __func__);
7996 }
7997 }
7998
7999 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
8000 uint8_t fwk_aeMode =
8001 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
8002 uint8_t aeMode;
8003 int32_t redeye;
8004
8005 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
8006 aeMode = CAM_AE_MODE_OFF;
8007 } else {
8008 aeMode = CAM_AE_MODE_ON;
8009 }
8010 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
8011 redeye = 1;
8012 } else {
8013 redeye = 0;
8014 }
8015
8016 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8017 fwk_aeMode);
8018 if (NAME_NOT_FOUND != val) {
8019 int32_t flashMode = (int32_t)val;
8020 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
8021 }
8022
8023 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
8024 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
8025 rc = BAD_VALUE;
8026 }
8027 }
8028
8029 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
8030 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
8031 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8032 fwk_whiteLevel);
8033 if (NAME_NOT_FOUND != val) {
8034 uint8_t whiteLevel = (uint8_t)val;
8035 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
8036 rc = BAD_VALUE;
8037 }
8038 }
8039 }
8040
8041 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
8042 uint8_t fwk_cacMode =
8043 frame_settings.find(
8044 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
8045 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8046 fwk_cacMode);
8047 if (NAME_NOT_FOUND != val) {
8048 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
8049 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
8050 rc = BAD_VALUE;
8051 }
8052 } else {
8053 ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
8054 }
8055 }
8056
8057 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
8058 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
8059 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8060 fwk_focusMode);
8061 if (NAME_NOT_FOUND != val) {
8062 uint8_t focusMode = (uint8_t)val;
8063 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
8064 rc = BAD_VALUE;
8065 }
8066 }
8067 }
8068
8069 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
8070 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
8071 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
8072 focalDistance)) {
8073 rc = BAD_VALUE;
8074 }
8075 }
8076
8077 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
8078 uint8_t fwk_antibandingMode =
8079 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
8080 int val = lookupHalName(ANTIBANDING_MODES_MAP,
8081 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
8082 if (NAME_NOT_FOUND != val) {
8083 uint32_t hal_antibandingMode = (uint32_t)val;
8084 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
8085 hal_antibandingMode)) {
8086 rc = BAD_VALUE;
8087 }
8088 }
8089 }
8090
8091 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
8092 int32_t expCompensation = frame_settings.find(
8093 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
8094 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
8095 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
8096 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
8097 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
8098 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
8099 expCompensation)) {
8100 rc = BAD_VALUE;
8101 }
8102 }
8103
8104 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
8105 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
8106 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
8107 rc = BAD_VALUE;
8108 }
8109 }
8110 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
8111 rc = setHalFpsRange(frame_settings, hal_metadata);
8112 if (rc != NO_ERROR) {
8113 ALOGE("%s: setHalFpsRange failed", __func__);
8114 }
8115 }
8116
8117 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
8118 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
8119 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
8120 rc = BAD_VALUE;
8121 }
8122 }
8123
8124 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
8125 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
8126 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8127 fwk_effectMode);
8128 if (NAME_NOT_FOUND != val) {
8129 uint8_t effectMode = (uint8_t)val;
8130 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
8131 rc = BAD_VALUE;
8132 }
8133 }
8134 }
8135
8136 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
8137 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
8138 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
8139 colorCorrectMode)) {
8140 rc = BAD_VALUE;
8141 }
8142 }
8143
8144 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
8145 cam_color_correct_gains_t colorCorrectGains;
8146 for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
8147 colorCorrectGains.gains[i] =
8148 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
8149 }
8150 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
8151 colorCorrectGains)) {
8152 rc = BAD_VALUE;
8153 }
8154 }
8155
8156 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
8157 cam_color_correct_matrix_t colorCorrectTransform;
8158 cam_rational_type_t transform_elem;
8159 size_t num = 0;
8160 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
8161 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
8162 transform_elem.numerator =
8163 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
8164 transform_elem.denominator =
8165 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
8166 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
8167 num++;
8168 }
8169 }
8170 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
8171 colorCorrectTransform)) {
8172 rc = BAD_VALUE;
8173 }
8174 }
8175
8176 cam_trigger_t aecTrigger;
8177 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
8178 aecTrigger.trigger_id = -1;
8179 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
8180 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
8181 aecTrigger.trigger =
8182 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
8183 aecTrigger.trigger_id =
8184 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
8185 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
8186 aecTrigger)) {
8187 rc = BAD_VALUE;
8188 }
8189 CDBG("%s: precaptureTrigger: %d precaptureTriggerID: %d", __func__,
8190 aecTrigger.trigger, aecTrigger.trigger_id);
8191 }
8192
8193 /*af_trigger must come with a trigger id*/
8194 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
8195 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
8196 cam_trigger_t af_trigger;
8197 af_trigger.trigger =
8198 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
8199 af_trigger.trigger_id =
8200 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
8201 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
8202 rc = BAD_VALUE;
8203 }
8204 CDBG("%s: AfTrigger: %d AfTriggerID: %d", __func__,
8205 af_trigger.trigger, af_trigger.trigger_id);
8206 }
8207
8208 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
8209 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
8210 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
8211 rc = BAD_VALUE;
8212 }
8213 }
8214 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
8215 cam_edge_application_t edge_application;
8216 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
8217 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
8218 edge_application.sharpness = 0;
8219 } else {
8220 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
8221 }
8222 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
8223 rc = BAD_VALUE;
8224 }
8225 }
8226
8227 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8228 int32_t respectFlashMode = 1;
8229 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
8230 uint8_t fwk_aeMode =
8231 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
8232 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
8233 respectFlashMode = 0;
8234 CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
8235 __func__);
8236 }
8237 }
8238 if (respectFlashMode) {
8239 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8240 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8241 CDBG_HIGH("%s: flash mode after mapping %d", __func__, val);
8242 // To check: CAM_INTF_META_FLASH_MODE usage
8243 if (NAME_NOT_FOUND != val) {
8244 uint8_t flashMode = (uint8_t)val;
8245 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
8246 rc = BAD_VALUE;
8247 }
8248 }
8249 }
8250 }
8251
8252 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
8253 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
8254 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
8255 rc = BAD_VALUE;
8256 }
8257 }
8258
8259 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
8260 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
8261 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
8262 flashFiringTime)) {
8263 rc = BAD_VALUE;
8264 }
8265 }
8266
8267 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
8268 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
8269 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
8270 hotPixelMode)) {
8271 rc = BAD_VALUE;
8272 }
8273 }
8274
8275 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
8276 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
8277 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
8278 lensAperture)) {
8279 rc = BAD_VALUE;
8280 }
8281 }
8282
8283 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
8284 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
8285 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
8286 filterDensity)) {
8287 rc = BAD_VALUE;
8288 }
8289 }
8290
8291 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
8292 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
8293 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
8294 focalLength)) {
8295 rc = BAD_VALUE;
8296 }
8297 }
8298
8299 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
8300 uint8_t optStabMode =
8301 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
8302 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
8303 optStabMode)) {
8304 rc = BAD_VALUE;
8305 }
8306 }
8307
8308 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
8309 uint8_t videoStabMode =
8310 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
8311 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
8312 videoStabMode)) {
8313 rc = BAD_VALUE;
8314 }
8315 }
8316
8317
8318 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
8319 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
8320 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
8321 noiseRedMode)) {
8322 rc = BAD_VALUE;
8323 }
8324 }
8325
8326 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
8327 float reprocessEffectiveExposureFactor =
8328 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
8329 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
8330 reprocessEffectiveExposureFactor)) {
8331 rc = BAD_VALUE;
8332 }
8333 }
8334
8335 cam_crop_region_t scalerCropRegion;
8336 bool scalerCropSet = false;
8337 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
8338 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
8339 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
8340 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
8341 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
8342
8343 // Map coordinate system from active array to sensor output.
8344 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
8345 scalerCropRegion.width, scalerCropRegion.height);
8346
8347 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
8348 scalerCropRegion)) {
8349 rc = BAD_VALUE;
8350 }
8351 scalerCropSet = true;
8352 }
8353
8354 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
8355 int64_t sensorExpTime =
8356 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
8357 CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
8358 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
8359 sensorExpTime)) {
8360 rc = BAD_VALUE;
8361 }
8362 }
8363
8364 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
8365 int64_t sensorFrameDuration =
8366 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
8367 int64_t minFrameDuration = getMinFrameDuration(request);
8368 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
8369 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
8370 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
8371 CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
8372 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
8373 sensorFrameDuration)) {
8374 rc = BAD_VALUE;
8375 }
8376 }
8377
8378 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
8379 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
8380 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
8381 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
8382 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
8383 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
8384 CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
8385 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
8386 sensorSensitivity)) {
8387 rc = BAD_VALUE;
8388 }
8389 }
8390
8391 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
8392 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
8393 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
8394 rc = BAD_VALUE;
8395 }
8396 }
8397
8398 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
8399 uint8_t fwk_facedetectMode =
8400 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
8401
8402 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
8403 fwk_facedetectMode);
8404
8405 if (NAME_NOT_FOUND != val) {
8406 uint8_t facedetectMode = (uint8_t)val;
8407 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
8408 facedetectMode)) {
8409 rc = BAD_VALUE;
8410 }
8411 }
8412 }
8413
8414 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
8415 uint8_t histogramMode =
8416 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
8417 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
8418 histogramMode)) {
8419 rc = BAD_VALUE;
8420 }
8421 }
8422
8423 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
8424 uint8_t sharpnessMapMode =
8425 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
8426 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
8427 sharpnessMapMode)) {
8428 rc = BAD_VALUE;
8429 }
8430 }
8431
8432 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
8433 uint8_t tonemapMode =
8434 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
8435 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
8436 rc = BAD_VALUE;
8437 }
8438 }
8439 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
8440 /*All tonemap channels will have the same number of points*/
8441 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
8442 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
8443 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
8444 cam_rgb_tonemap_curves tonemapCurves;
8445 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
8446 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
8447 ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
8448 __func__, tonemapCurves.tonemap_points_cnt,
8449 CAM_MAX_TONEMAP_CURVE_SIZE);
8450 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
8451 }
8452
8453 /* ch0 = G*/
8454 size_t point = 0;
8455 cam_tonemap_curve_t tonemapCurveGreen;
8456 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8457 for (size_t j = 0; j < 2; j++) {
8458 tonemapCurveGreen.tonemap_points[i][j] =
8459 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
8460 point++;
8461 }
8462 }
8463 tonemapCurves.curves[0] = tonemapCurveGreen;
8464
8465 /* ch 1 = B */
8466 point = 0;
8467 cam_tonemap_curve_t tonemapCurveBlue;
8468 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8469 for (size_t j = 0; j < 2; j++) {
8470 tonemapCurveBlue.tonemap_points[i][j] =
8471 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
8472 point++;
8473 }
8474 }
8475 tonemapCurves.curves[1] = tonemapCurveBlue;
8476
8477 /* ch 2 = R */
8478 point = 0;
8479 cam_tonemap_curve_t tonemapCurveRed;
8480 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8481 for (size_t j = 0; j < 2; j++) {
8482 tonemapCurveRed.tonemap_points[i][j] =
8483 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
8484 point++;
8485 }
8486 }
8487 tonemapCurves.curves[2] = tonemapCurveRed;
8488
8489 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
8490 tonemapCurves)) {
8491 rc = BAD_VALUE;
8492 }
8493 }
8494
8495 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
8496 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
8497 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
8498 captureIntent)) {
8499 rc = BAD_VALUE;
8500 }
8501 }
8502
8503 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
8504 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
8505 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
8506 blackLevelLock)) {
8507 rc = BAD_VALUE;
8508 }
8509 }
8510
8511 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
8512 uint8_t lensShadingMapMode =
8513 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
8514 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
8515 lensShadingMapMode)) {
8516 rc = BAD_VALUE;
8517 }
8518 }
8519
8520 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
8521 cam_area_t roi;
8522 bool reset = true;
8523 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
8524
8525 // Map coordinate system from active array to sensor output.
8526 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8527 roi.rect.height);
8528
8529 if (scalerCropSet) {
8530 reset = resetIfNeededROI(&roi, &scalerCropRegion);
8531 }
8532 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
8533 rc = BAD_VALUE;
8534 }
8535 }
8536
8537 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
8538 cam_area_t roi;
8539 bool reset = true;
8540 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
8541
8542 // Map coordinate system from active array to sensor output.
8543 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8544 roi.rect.height);
8545
8546 if (scalerCropSet) {
8547 reset = resetIfNeededROI(&roi, &scalerCropRegion);
8548 }
8549 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
8550 rc = BAD_VALUE;
8551 }
8552 }
8553
8554 if (m_bIs4KVideo) {
8555 /* Override needed for Video template in case of 4K video */
8556 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8557 CAM_INTF_PARM_CDS_MODE, m_CdsPreference)) {
8558 rc = BAD_VALUE;
8559 }
8560 } else if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
8561 frame_settings.exists(QCAMERA3_CDS_MODE)) {
8562 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
8563 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
8564 ALOGE("%s: Invalid CDS mode %d!", __func__, *fwk_cds);
8565 } else {
8566 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8567 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
8568 rc = BAD_VALUE;
8569 }
8570 }
8571 }
8572
8573 // TNR
8574 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
8575 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
8576 uint8_t b_TnrRequested = 0;
8577 cam_denoise_param_t tnr;
8578 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
8579 tnr.process_plates =
8580 (cam_denoise_process_type_t)frame_settings.find(
8581 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
8582 b_TnrRequested = tnr.denoise_enable;
8583 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
8584 rc = BAD_VALUE;
8585 }
8586 }
8587
8588 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
8589 int32_t fwk_testPatternMode =
8590 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
8591 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
8592 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
8593
8594 if (NAME_NOT_FOUND != testPatternMode) {
8595 cam_test_pattern_data_t testPatternData;
8596 memset(&testPatternData, 0, sizeof(testPatternData));
8597 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
8598 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
8599 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
8600 int32_t *fwk_testPatternData =
8601 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
8602 testPatternData.r = fwk_testPatternData[0];
8603 testPatternData.b = fwk_testPatternData[3];
8604 switch (gCamCapability[mCameraId]->color_arrangement) {
8605 case CAM_FILTER_ARRANGEMENT_RGGB:
8606 case CAM_FILTER_ARRANGEMENT_GRBG:
8607 testPatternData.gr = fwk_testPatternData[1];
8608 testPatternData.gb = fwk_testPatternData[2];
8609 break;
8610 case CAM_FILTER_ARRANGEMENT_GBRG:
8611 case CAM_FILTER_ARRANGEMENT_BGGR:
8612 testPatternData.gr = fwk_testPatternData[2];
8613 testPatternData.gb = fwk_testPatternData[1];
8614 break;
8615 default:
8616 ALOGE("%s: color arrangement %d is not supported", __func__,
8617 gCamCapability[mCameraId]->color_arrangement);
8618 break;
8619 }
8620 }
8621 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
8622 testPatternData)) {
8623 rc = BAD_VALUE;
8624 }
8625 } else {
8626 ALOGE("%s: Invalid framework sensor test pattern mode %d", __func__,
8627 fwk_testPatternMode);
8628 }
8629 }
8630
8631 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
8632 size_t count = 0;
8633 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
8634 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
8635 gps_coords.data.d, gps_coords.count, count);
8636 if (gps_coords.count != count) {
8637 rc = BAD_VALUE;
8638 }
8639 }
8640
8641 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
8642 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
8643 size_t count = 0;
8644 const char *gps_methods_src = (const char *)
8645 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
8646 memset(gps_methods, '\0', sizeof(gps_methods));
8647 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
8648 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
8649 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
8650 if (GPS_PROCESSING_METHOD_SIZE != count) {
8651 rc = BAD_VALUE;
8652 }
8653 }
8654
8655 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
8656 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
8657 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
8658 gps_timestamp)) {
8659 rc = BAD_VALUE;
8660 }
8661 }
8662
8663 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8664 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
8665 cam_rotation_info_t rotation_info;
8666 if (orientation == 0) {
8667 rotation_info.rotation = ROTATE_0;
8668 } else if (orientation == 90) {
8669 rotation_info.rotation = ROTATE_90;
8670 } else if (orientation == 180) {
8671 rotation_info.rotation = ROTATE_180;
8672 } else if (orientation == 270) {
8673 rotation_info.rotation = ROTATE_270;
8674 }
8675 rotation_info.streamId = snapshotStreamId;
8676 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
8677 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
8678 rc = BAD_VALUE;
8679 }
8680 }
8681
8682 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
8683 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
8684 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
8685 rc = BAD_VALUE;
8686 }
8687 }
8688
8689 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
8690 uint32_t thumb_quality = (uint32_t)
8691 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
8692 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
8693 thumb_quality)) {
8694 rc = BAD_VALUE;
8695 }
8696 }
8697
8698 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8699 cam_dimension_t dim;
8700 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8701 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8702 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
8703 rc = BAD_VALUE;
8704 }
8705 }
8706
8707 // Internal metadata
8708 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
8709 size_t count = 0;
8710 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
8711 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
8712 privatedata.data.i32, privatedata.count, count);
8713 if (privatedata.count != count) {
8714 rc = BAD_VALUE;
8715 }
8716 }
8717
8718 if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
8719 uint8_t* use_av_timer =
8720 frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
8721 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
8722 rc = BAD_VALUE;
8723 }
8724 }
8725
8726 // EV step
8727 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
8728 gCamCapability[mCameraId]->exp_compensation_step)) {
8729 rc = BAD_VALUE;
8730 }
8731
8732 // CDS info
8733 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
8734 cam_cds_data_t *cdsData = (cam_cds_data_t *)
8735 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
8736
8737 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8738 CAM_INTF_META_CDS_DATA, *cdsData)) {
8739 rc = BAD_VALUE;
8740 }
8741 }
8742
8743 // Hybrid AE
8744 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
8745 uint8_t *hybrid_ae = (uint8_t *)
8746 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
8747
8748 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8749 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
8750 rc = BAD_VALUE;
8751 }
8752 }
8753
8754 return rc;
8755 }
8756
8757 /*===========================================================================
8758 * FUNCTION : captureResultCb
8759 *
8760 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
8761 *
8762 * PARAMETERS :
8763 * @frame : frame information from mm-camera-interface
8764 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
8765 * @userdata: userdata
8766 *
8767 * RETURN : NONE
8768 *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer,void * userdata)8769 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
8770 camera3_stream_buffer_t *buffer,
8771 uint32_t frame_number, bool isInputBuffer, void *userdata)
8772 {
8773 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
8774 if (hw == NULL) {
8775 ALOGE("%s: Invalid hw %p", __func__, hw);
8776 return;
8777 }
8778
8779 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
8780 return;
8781 }
8782
8783
8784 /*===========================================================================
8785 * FUNCTION : initialize
8786 *
8787 * DESCRIPTION: Pass framework callback pointers to HAL
8788 *
8789 * PARAMETERS :
8790 *
8791 *
8792 * RETURN : Success : 0
8793 * Failure: -ENODEV
8794 *==========================================================================*/
8795
initialize(const struct camera3_device * device,const camera3_callback_ops_t * callback_ops)8796 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
8797 const camera3_callback_ops_t *callback_ops)
8798 {
8799 CDBG("%s: E", __func__);
8800 QCamera3HardwareInterface *hw =
8801 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8802 if (!hw) {
8803 ALOGE("%s: NULL camera device", __func__);
8804 return -ENODEV;
8805 }
8806
8807 int rc = hw->initialize(callback_ops);
8808 CDBG("%s: X", __func__);
8809 return rc;
8810 }
8811
8812 /*===========================================================================
8813 * FUNCTION : configure_streams
8814 *
8815 * DESCRIPTION:
8816 *
8817 * PARAMETERS :
8818 *
8819 *
8820 * RETURN : Success: 0
8821 * Failure: -EINVAL (if stream configuration is invalid)
8822 * -ENODEV (fatal error)
8823 *==========================================================================*/
8824
configure_streams(const struct camera3_device * device,camera3_stream_configuration_t * stream_list)8825 int QCamera3HardwareInterface::configure_streams(
8826 const struct camera3_device *device,
8827 camera3_stream_configuration_t *stream_list)
8828 {
8829 CDBG("%s: E", __func__);
8830 QCamera3HardwareInterface *hw =
8831 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8832 if (!hw) {
8833 ALOGE("%s: NULL camera device", __func__);
8834 return -ENODEV;
8835 }
8836 int rc = hw->configureStreams(stream_list);
8837 CDBG("%s: X", __func__);
8838 return rc;
8839 }
8840
8841 /*===========================================================================
8842 * FUNCTION : construct_default_request_settings
8843 *
8844 * DESCRIPTION: Configure a settings buffer to meet the required use case
8845 *
8846 * PARAMETERS :
8847 *
8848 *
8849 * RETURN : Success: Return valid metadata
8850 * Failure: Return NULL
8851 *==========================================================================*/
8852 const camera_metadata_t* QCamera3HardwareInterface::
construct_default_request_settings(const struct camera3_device * device,int type)8853 construct_default_request_settings(const struct camera3_device *device,
8854 int type)
8855 {
8856
8857 CDBG("%s: E", __func__);
8858 camera_metadata_t* fwk_metadata = NULL;
8859 QCamera3HardwareInterface *hw =
8860 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8861 if (!hw) {
8862 ALOGE("%s: NULL camera device", __func__);
8863 return NULL;
8864 }
8865
8866 fwk_metadata = hw->translateCapabilityToMetadata(type);
8867
8868 CDBG("%s: X", __func__);
8869 return fwk_metadata;
8870 }
8871
8872 /*===========================================================================
8873 * FUNCTION : process_capture_request
8874 *
8875 * DESCRIPTION:
8876 *
8877 * PARAMETERS :
8878 *
8879 *
8880 * RETURN :
8881 *==========================================================================*/
process_capture_request(const struct camera3_device * device,camera3_capture_request_t * request)8882 int QCamera3HardwareInterface::process_capture_request(
8883 const struct camera3_device *device,
8884 camera3_capture_request_t *request)
8885 {
8886 CDBG("%s: E", __func__);
8887 QCamera3HardwareInterface *hw =
8888 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8889 if (!hw) {
8890 ALOGE("%s: NULL camera device", __func__);
8891 return -EINVAL;
8892 }
8893
8894 int rc = hw->processCaptureRequest(request);
8895 CDBG("%s: X", __func__);
8896 return rc;
8897 }
8898
8899 /*===========================================================================
8900 * FUNCTION : dump
8901 *
8902 * DESCRIPTION:
8903 *
8904 * PARAMETERS :
8905 *
8906 *
8907 * RETURN :
8908 *==========================================================================*/
8909
dump(const struct camera3_device * device,int fd)8910 void QCamera3HardwareInterface::dump(
8911 const struct camera3_device *device, int fd)
8912 {
8913 /* Log level property is read when "adb shell dumpsys media.camera" is
8914 called so that the log level can be controlled without restarting
8915 the media server */
8916 getLogLevel();
8917
8918 CDBG("%s: E", __func__);
8919 QCamera3HardwareInterface *hw =
8920 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8921 if (!hw) {
8922 ALOGE("%s: NULL camera device", __func__);
8923 return;
8924 }
8925
8926 hw->dump(fd);
8927 CDBG("%s: X", __func__);
8928 return;
8929 }
8930
8931 /*===========================================================================
8932 * FUNCTION : flush
8933 *
8934 * DESCRIPTION:
8935 *
8936 * PARAMETERS :
8937 *
8938 *
8939 * RETURN :
8940 *==========================================================================*/
8941
flush(const struct camera3_device * device)8942 int QCamera3HardwareInterface::flush(
8943 const struct camera3_device *device)
8944 {
8945 int rc;
8946 CDBG("%s: E", __func__);
8947 QCamera3HardwareInterface *hw =
8948 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8949 if (!hw) {
8950 ALOGE("%s: NULL camera device", __func__);
8951 return -EINVAL;
8952 }
8953
8954 rc = hw->flush();
8955 CDBG("%s: X", __func__);
8956 return rc;
8957 }
8958
8959 /*===========================================================================
8960 * FUNCTION : close_camera_device
8961 *
8962 * DESCRIPTION:
8963 *
8964 * PARAMETERS :
8965 *
8966 *
8967 * RETURN :
8968 *==========================================================================*/
close_camera_device(struct hw_device_t * device)8969 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
8970 {
8971 CDBG("%s: E", __func__);
8972 int ret = NO_ERROR;
8973 QCamera3HardwareInterface *hw =
8974 reinterpret_cast<QCamera3HardwareInterface *>(
8975 reinterpret_cast<camera3_device_t *>(device)->priv);
8976 if (!hw) {
8977 ALOGE("NULL camera device");
8978 return BAD_VALUE;
8979 }
8980 delete hw;
8981
8982 CDBG("%s: X", __func__);
8983 return ret;
8984 }
8985
8986 /*===========================================================================
8987 * FUNCTION : getWaveletDenoiseProcessPlate
8988 *
8989 * DESCRIPTION: query wavelet denoise process plate
8990 *
8991 * PARAMETERS : None
8992 *
 * RETURN : WNR process plate value
8994 *==========================================================================*/
getWaveletDenoiseProcessPlate()8995 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
8996 {
8997 char prop[PROPERTY_VALUE_MAX];
8998 memset(prop, 0, sizeof(prop));
8999 property_get("persist.denoise.process.plates", prop, "0");
9000 int processPlate = atoi(prop);
9001 switch(processPlate) {
9002 case 0:
9003 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9004 case 1:
9005 return CAM_WAVELET_DENOISE_CBCR_ONLY;
9006 case 2:
9007 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9008 case 3:
9009 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9010 default:
9011 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9012 }
9013 }
9014
9015
9016 /*===========================================================================
9017 * FUNCTION : getTemporalDenoiseProcessPlate
9018 *
9019 * DESCRIPTION: query temporal denoise process plate
9020 *
9021 * PARAMETERS : None
9022 *
 * RETURN : TNR process plate value
9024 *==========================================================================*/
getTemporalDenoiseProcessPlate()9025 cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
9026 {
9027 char prop[PROPERTY_VALUE_MAX];
9028 memset(prop, 0, sizeof(prop));
9029 property_get("persist.tnr.process.plates", prop, "0");
9030 int processPlate = atoi(prop);
9031 switch(processPlate) {
9032 case 0:
9033 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9034 case 1:
9035 return CAM_WAVELET_DENOISE_CBCR_ONLY;
9036 case 2:
9037 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9038 case 3:
9039 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9040 default:
9041 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9042 }
9043 }
9044
9045
9046 /*===========================================================================
9047 * FUNCTION : extractSceneMode
9048 *
9049 * DESCRIPTION: Extract scene mode from frameworks set metadata
9050 *
9051 * PARAMETERS :
9052 * @frame_settings: CameraMetadata reference
9053 * @metaMode: ANDROID_CONTORL_MODE
9054 * @hal_metadata: hal metadata structure
9055 *
9056 * RETURN : None
9057 *==========================================================================*/
extractSceneMode(const CameraMetadata & frame_settings,uint8_t metaMode,metadata_buffer_t * hal_metadata)9058 int32_t QCamera3HardwareInterface::extractSceneMode(
9059 const CameraMetadata &frame_settings, uint8_t metaMode,
9060 metadata_buffer_t *hal_metadata)
9061 {
9062 int32_t rc = NO_ERROR;
9063
9064 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
9065 camera_metadata_ro_entry entry =
9066 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
9067 if (0 == entry.count)
9068 return rc;
9069
9070 uint8_t fwk_sceneMode = entry.data.u8[0];
9071
9072 int val = lookupHalName(SCENE_MODES_MAP,
9073 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
9074 fwk_sceneMode);
9075 if (NAME_NOT_FOUND != val) {
9076 uint8_t sceneMode = (uint8_t)val;
9077 CDBG("%s: sceneMode: %d", __func__, sceneMode);
9078 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9079 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9080 rc = BAD_VALUE;
9081 }
9082 }
9083 } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
9084 (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
9085 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
9086 CDBG("%s: sceneMode: %d", __func__, sceneMode);
9087 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9088 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9089 rc = BAD_VALUE;
9090 }
9091 }
9092 return rc;
9093 }
9094
9095 /*===========================================================================
9096 * FUNCTION : needRotationReprocess
9097 *
9098 * DESCRIPTION: if rotation needs to be done by reprocess in pp
9099 *
9100 * PARAMETERS : none
9101 *
9102 * RETURN : true: needed
9103 * false: no need
9104 *==========================================================================*/
needRotationReprocess()9105 bool QCamera3HardwareInterface::needRotationReprocess()
9106 {
9107 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
9108 // current rotation is not zero, and pp has the capability to process rotation
9109 CDBG_HIGH("%s: need do reprocess for rotation", __func__);
9110 return true;
9111 }
9112
9113 return false;
9114 }
9115
9116 /*===========================================================================
9117 * FUNCTION : needReprocess
9118 *
9119 * DESCRIPTION: if reprocess in needed
9120 *
9121 * PARAMETERS : none
9122 *
9123 * RETURN : true: needed
9124 * false: no need
9125 *==========================================================================*/
needReprocess(uint32_t postprocess_mask)9126 bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
9127 {
9128 if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
9129 // TODO: add for ZSL HDR later
9130 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
9131 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
9132 CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
9133 return true;
9134 } else {
9135 CDBG_HIGH("%s: already post processed frame", __func__);
9136 return false;
9137 }
9138 }
9139 return needRotationReprocess();
9140 }
9141
9142 /*===========================================================================
9143 * FUNCTION : needJpegRotation
9144 *
9145 * DESCRIPTION: if rotation from jpeg is needed
9146 *
9147 * PARAMETERS : none
9148 *
9149 * RETURN : true: needed
9150 * false: no need
9151 *==========================================================================*/
needJpegRotation()9152 bool QCamera3HardwareInterface::needJpegRotation()
9153 {
9154 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
9155 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
9156 CDBG("%s: Need Jpeg to do the rotation", __func__);
9157 return true;
9158 }
9159 return false;
9160 }
9161
9162 /*===========================================================================
9163 * FUNCTION : addOfflineReprocChannel
9164 *
9165 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
9166 * coming from input channel
9167 *
9168 * PARAMETERS :
9169 * @config : reprocess configuration
9170 * @inputChHandle : pointer to the input (source) channel
9171 *
9172 *
9173 * RETURN : Ptr to the newly created channel obj. NULL if failed.
9174 *==========================================================================*/
addOfflineReprocChannel(const reprocess_config_t & config,QCamera3ProcessingChannel * inputChHandle)9175 QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
9176 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
9177 {
9178 int32_t rc = NO_ERROR;
9179 QCamera3ReprocessChannel *pChannel = NULL;
9180
9181 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
9182 mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
9183 CAM_QCOM_FEATURE_NONE, this, inputChHandle);
9184 if (NULL == pChannel) {
9185 ALOGE("%s: no mem for reprocess channel", __func__);
9186 return NULL;
9187 }
9188
9189 rc = pChannel->initialize(IS_TYPE_NONE);
9190 if (rc != NO_ERROR) {
9191 ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
9192 delete pChannel;
9193 return NULL;
9194 }
9195
9196 // pp feature config
9197 cam_pp_feature_config_t pp_config;
9198 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
9199
9200 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
9201
9202 rc = pChannel->addReprocStreamsFromSource(pp_config,
9203 config,
9204 IS_TYPE_NONE,
9205 mMetadataChannel);
9206
9207 if (rc != NO_ERROR) {
9208 delete pChannel;
9209 return NULL;
9210 }
9211 return pChannel;
9212 }
9213
9214 /*===========================================================================
9215 * FUNCTION : getMobicatMask
9216 *
9217 * DESCRIPTION: returns mobicat mask
9218 *
9219 * PARAMETERS : none
9220 *
9221 * RETURN : mobicat mask
9222 *
9223 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Returns the cached mobicat enable value written by setMobicat().
    return m_MobicatMask;
}
9228
9229 /*===========================================================================
9230 * FUNCTION : setMobicat
9231 *
9232 * DESCRIPTION: set Mobicat on/off.
9233 *
9234 * PARAMETERS :
9235 * @params : none
9236 *
9237 * RETURN : int32_t type of status
9238 * NO_ERROR -- success
9239 * none-zero failure code
9240 *==========================================================================*/
setMobicat()9241 int32_t QCamera3HardwareInterface::setMobicat()
9242 {
9243 char value [PROPERTY_VALUE_MAX];
9244 property_get("persist.camera.mobicat", value, "0");
9245 int32_t ret = NO_ERROR;
9246 uint8_t enableMobi = (uint8_t)atoi(value);
9247
9248 if (enableMobi) {
9249 tune_cmd_t tune_cmd;
9250 tune_cmd.type = SET_RELOAD_CHROMATIX;
9251 tune_cmd.module = MODULE_ALL;
9252 tune_cmd.value = TRUE;
9253 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9254 CAM_INTF_PARM_SET_VFE_COMMAND,
9255 tune_cmd);
9256
9257 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9258 CAM_INTF_PARM_SET_PP_COMMAND,
9259 tune_cmd);
9260 }
9261 m_MobicatMask = enableMobi;
9262
9263 return ret;
9264 }
9265
9266 /*===========================================================================
9267 * FUNCTION : getLogLevel
9268 *
9269 * DESCRIPTION: Reads the log level property into a variable
9270 *
9271 * PARAMETERS :
9272 * None
9273 *
9274 * RETURN :
9275 * None
9276 *==========================================================================*/
getLogLevel()9277 void QCamera3HardwareInterface::getLogLevel()
9278 {
9279 char prop[PROPERTY_VALUE_MAX];
9280 uint32_t globalLogLevel = 0;
9281
9282 property_get("persist.camera.hal.debug", prop, "0");
9283 int val = atoi(prop);
9284 if (0 <= val) {
9285 gCamHal3LogLevel = (uint32_t)val;
9286 }
9287 property_get("persist.camera.global.debug", prop, "0");
9288 val = atoi(prop);
9289 if (0 <= val) {
9290 globalLogLevel = (uint32_t)val;
9291 }
9292
9293 /* Highest log level among hal.logs and global.logs is selected */
9294 if (gCamHal3LogLevel < globalLogLevel)
9295 gCamHal3LogLevel = globalLogLevel;
9296
9297 return;
9298 }
9299
9300 /*===========================================================================
9301 * FUNCTION : validateStreamRotations
9302 *
9303 * DESCRIPTION: Check if the rotations requested are supported
9304 *
9305 * PARAMETERS :
9306 * @stream_list : streams to be configured
9307 *
9308 * RETURN : NO_ERROR on success
9309 * -EINVAL on failure
9310 *
9311 *==========================================================================*/
validateStreamRotations(camera3_stream_configuration_t * streamList)9312 int QCamera3HardwareInterface::validateStreamRotations(
9313 camera3_stream_configuration_t *streamList)
9314 {
9315 int rc = NO_ERROR;
9316
9317 /*
9318 * Loop through all streams requested in configuration
9319 * Check if unsupported rotations have been requested on any of them
9320 */
9321 for (size_t j = 0; j < streamList->num_streams; j++){
9322 camera3_stream_t *newStream = streamList->streams[j];
9323
9324 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
9325 bool isImplDef = (newStream->format ==
9326 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
9327 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
9328 isImplDef);
9329
9330 if (isRotated && (!isImplDef || isZsl)) {
9331 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
9332 "type:%d and stream format:%d", __func__,
9333 newStream->rotation, newStream->stream_type,
9334 newStream->format);
9335 rc = -EINVAL;
9336 break;
9337 }
9338 }
9339 return rc;
9340 }
9341
9342 /*===========================================================================
9343 * FUNCTION : getFlashInfo
9344 *
9345 * DESCRIPTION: Retrieve information about whether the device has a flash.
9346 *
9347 * PARAMETERS :
9348 * @cameraId : Camera id to query
9349 * @hasFlash : Boolean indicating whether there is a flash device
9350 * associated with given camera
9351 * @flashNode : If a flash device exists, this will be its device node.
9352 *
9353 * RETURN :
9354 * None
9355 *==========================================================================*/
getFlashInfo(const int cameraId,bool & hasFlash,char (& flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])9356 void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
9357 bool& hasFlash,
9358 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
9359 {
9360 cam_capability_t* camCapability = gCamCapability[cameraId];
9361 if (NULL == camCapability) {
9362 hasFlash = false;
9363 flashNode[0] = '\0';
9364 } else {
9365 hasFlash = camCapability->flash_available;
9366 strlcpy(flashNode,
9367 (char*)camCapability->flash_dev_name,
9368 QCAMERA_MAX_FILEPATH_LENGTH);
9369 }
9370 }
9371
9372 /*===========================================================================
9373 * FUNCTION : getEepromVersionInfo
9374 *
9375 * DESCRIPTION: Retrieve version info of the sensor EEPROM data
9376 *
9377 * PARAMETERS : None
9378 *
9379 * RETURN : string describing EEPROM version
9380 * "\0" if no such info available
9381 *==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Returns a pointer into this camera's static capability table; the
    // string lives as long as gCamCapability does.
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
9386
9387 /*===========================================================================
9388 * FUNCTION : getLdafCalib
9389 *
9390 * DESCRIPTION: Retrieve Laser AF calibration data
9391 *
9392 * PARAMETERS : None
9393 *
9394 * RETURN : Two uint32_t describing laser AF calibration data
9395 * NULL if none is available.
9396 *==========================================================================*/
getLdafCalib()9397 const uint32_t *QCamera3HardwareInterface::getLdafCalib()
9398 {
9399 if (mLdafCalibExist) {
9400 return &mLdafCalib[0];
9401 } else {
9402 return NULL;
9403 }
9404 }
9405
9406 /*===========================================================================
9407 * FUNCTION : dynamicUpdateMetaStreamInfo
9408 *
9409 * DESCRIPTION: This function:
9410 * (1) stops all the channels
9411 * (2) returns error on pending requests and buffers
9412 * (3) sends metastream_info in setparams
9413 * (4) starts all channels
9414 * This is useful when sensor has to be restarted to apply any
9415 * settings such as frame rate from a different sensor mode
9416 *
9417 * PARAMETERS : None
9418 *
9419 * RETURN : NO_ERROR on success
9420 * Error codes on failure
9421 *
9422 *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    CDBG("%s: E", __func__);

    // (1) Stream-off every channel so the sensor can be reconfigured.
    rc = stopAllChannels();
    if (rc < 0) {
        ALOGE("%s: stopAllChannels failed", __func__);
        return rc;
    }

    // (2) Fail all outstanding requests/buffers back to the framework;
    // they cannot complete across the restart.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
        return rc;
    }

    /* Send meta stream info once again so that ISP can start */
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    CDBG("%s: set_parms META_STREAM_INFO with new settings ", __func__ );
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        // Deliberately non-fatal: streaming is restarted below even if the
        // new sensor mode could not be applied.
        ALOGE("%s: set Metastreaminfo failed. Sensor mode does not change",
                __func__);
    }

    // (4) Stream-on everything again.
    rc = startAllChannels();
    if (rc < 0) {
        ALOGE("%s: startAllChannels failed", __func__);
        return rc;
    }

    CDBG("%s:%d X", __func__, __LINE__);
    return rc;
}
9462
9463 /*===========================================================================
9464 * FUNCTION : stopAllChannels
9465 *
9466 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
9467 *
9468 * PARAMETERS : None
9469 *
9470 * RETURN : NO_ERROR on success
9471 * Error codes on failure
9472 *
9473 *==========================================================================*/
stopAllChannels()9474 int32_t QCamera3HardwareInterface::stopAllChannels()
9475 {
9476 int32_t rc = NO_ERROR;
9477
9478 // Stop the Streams/Channels
9479 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9480 it != mStreamInfo.end(); it++) {
9481 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9482 channel->stop();
9483 (*it)->status = INVALID;
9484 }
9485
9486 if (mSupportChannel) {
9487 mSupportChannel->stop();
9488 }
9489 if (mAnalysisChannel) {
9490 mAnalysisChannel->stop();
9491 }
9492 if (mRawDumpChannel) {
9493 mRawDumpChannel->stop();
9494 }
9495 if (mMetadataChannel) {
9496 /* If content of mStreamInfo is not 0, there is metadata stream */
9497 mMetadataChannel->stop();
9498 }
9499
9500 CDBG("%s:%d All channels stopped", __func__, __LINE__);
9501 return rc;
9502 }
9503
9504 /*===========================================================================
9505 * FUNCTION : startAllChannels
9506 *
9507 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
9508 *
9509 * PARAMETERS : None
9510 *
9511 * RETURN : NO_ERROR on success
9512 * Error codes on failure
9513 *
9514 *==========================================================================*/
int32_t QCamera3HardwareInterface::startAllChannels()
{
    int32_t rc = NO_ERROR;

    CDBG("%s: Start all channels ", __func__);
    // Start the Streams/Channels. Metadata is started before the image
    // channels; any start failure aborts and returns the error.
    if (mMetadataChannel) {
        /* If content of mStreamInfo is not 0, there is metadata stream */
        rc = mMetadataChannel->start();
        if (rc < 0) {
            ALOGE("%s: META channel start failed", __func__);
            return rc;
        }
    }
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        rc = channel->start();
        if (rc < 0) {
            ALOGE("%s: channel start failed", __func__);
            return rc;
        }
    }
    if (mAnalysisChannel) {
        // NOTE(review): unlike the other channels, this start()'s return
        // value is ignored — confirm whether analysis-channel start is
        // intentionally best-effort.
        mAnalysisChannel->start();
    }
    if (mSupportChannel) {
        rc = mSupportChannel->start();
        if (rc < 0) {
            ALOGE("%s: Support channel start failed", __func__);
            return rc;
        }
    }
    if (mRawDumpChannel) {
        rc = mRawDumpChannel->start();
        if (rc < 0) {
            ALOGE("%s: RAW dump channel start failed", __func__);
            return rc;
        }
    }

    CDBG("%s:%d All channels started", __func__, __LINE__);
    return rc;
}
9559
9560 /*===========================================================================
9561 * FUNCTION : notifyErrorForPendingRequests
9562 *
9563 * DESCRIPTION: This function sends error for all the pending requests/buffers
9564 *
9565 * PARAMETERS : None
9566 *
9567 * RETURN : Error codes
9568 * NO_ERROR on success
9569 *
9570 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;
    FlushMap flushMap;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Determine the oldest pending request's frame number. Buffers older
    // than it are failed as ERROR_BUFFER (phase 1); the remaining buffers
    // belong to still-pending requests and are failed as ERROR_REQUEST
    // (phase 2).
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    CDBG_HIGH("%s: Oldest frame num on mPendingRequestsList = %d",
            __func__, frameNum);

    // Go through the pending buffers and group them depending
    // on frame number
    for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end();) {

        if (k->frame_number < frameNum) {
            // Buffer predates the oldest pending request: bucket it by
            // frame number and remove it from the pending-buffer list.
            ssize_t idx = flushMap.indexOfKey(k->frame_number);
            if (idx == NAME_NOT_FOUND) {
                Vector<PendingBufferInfo> pending;
                pending.add(*k);
                flushMap.add(k->frame_number, pending);
            } else {
                Vector<PendingBufferInfo> &pending =
                        flushMap.editValueFor(k->frame_number);
                pending.add(*k);
            }

            mPendingBuffersMap.num_buffers--;
            k = mPendingBuffersMap.mPendingBufferList.erase(k);
        } else {
            k++;
        }
    }

    // Phase 1: per frame, notify ERROR_BUFFER for each buffer and return
    // the buffers to the framework with BUFFER_STATUS_ERROR.
    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
        uint32_t frame_number = flushMap.keyAt(iFlush);
        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);

        // Send Error notify to frameworks for each buffer for which
        // metadata buffer is already sent
        CDBG_HIGH("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
                __func__, frame_number, pending.size());

        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
        if (NULL == pStream_Buf) {
            ALOGE("%s: No memory for pending buffers array", __func__);
            return NO_MEMORY;
        }
        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());

        for (size_t j = 0; j < pending.size(); j++) {
            const PendingBufferInfo &info = pending.itemAt(j);
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = info.stream;
            notify_msg.message.error.frame_number = frame_number;
            pStream_Buf[j].acquire_fence = -1;  // no fences on error return
            pStream_Buf[j].release_fence = -1;
            pStream_Buf[j].buffer = info.buffer;
            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
            pStream_Buf[j].stream = info.stream;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            CDBG_HIGH("%s: notify frame_number = %d stream %p", __func__,
                    frame_number, info.stream);
        }

        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = (uint32_t)pending.size();
        result.output_buffers = pStream_Buf;
        mCallbackOps->process_capture_result(mCallbackOps, &result);

        delete [] pStream_Buf;
    }

    CDBG_HIGH("%s:Sending ERROR REQUEST for all pending requests", __func__);

    // Phase 2: regroup whatever buffers remain (all belong to pending
    // requests) and fail each request as a whole.
    flushMap.clear();
    for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end();) {
        ssize_t idx = flushMap.indexOfKey(k->frame_number);
        if (idx == NAME_NOT_FOUND) {
            Vector<PendingBufferInfo> pending;
            pending.add(*k);
            flushMap.add(k->frame_number, pending);
        } else {
            Vector<PendingBufferInfo> &pending =
                    flushMap.editValueFor(k->frame_number);
            pending.add(*k);
        }

        mPendingBuffersMap.num_buffers--;
        k = mPendingBuffersMap.mPendingBufferList.erase(k);
    }

    pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

    // Go through the pending requests info and send error request to framework
    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
        uint32_t frame_number = flushMap.keyAt(iFlush);
        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
        CDBG_HIGH("%s:Sending ERROR REQUEST for frame %d",
                __func__, frame_number);

        // Send shutter notify to frameworks
        camera3_notify_msg_t notify_msg;
        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
        notify_msg.type = CAMERA3_MSG_ERROR;
        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
        notify_msg.message.error.error_stream = NULL;
        notify_msg.message.error.frame_number = frame_number;
        mCallbackOps->notify(mCallbackOps, &notify_msg);

        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
        if (NULL == pStream_Buf) {
            ALOGE("%s: No memory for pending buffers array", __func__);
            return NO_MEMORY;
        }
        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());

        for (size_t j = 0; j < pending.size(); j++) {
            const PendingBufferInfo &info = pending.itemAt(j);
            pStream_Buf[j].acquire_fence = -1;
            pStream_Buf[j].release_fence = -1;
            pStream_Buf[j].buffer = info.buffer;
            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
            pStream_Buf[j].stream = info.stream;
        }

        // NOTE(review): iterator i is advanced in lock-step with flushMap's
        // key order, i.e. this assumes the pending-request list is ordered
        // by frame number matching the map — confirm.
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = (uint32_t)pending.size();
        result.output_buffers = pStream_Buf;
        result.result = NULL;
        result.frame_number = frame_number;
        mCallbackOps->process_capture_result(mCallbackOps, &result);
        delete [] pStream_Buf;
        i = erasePendingRequest(i);
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    flushMap.clear();
    mPendingBuffersMap.num_buffers = 0;
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    CDBG_HIGH("%s: Cleared all the pending buffers ", __func__);

    return rc;
}
9738
isOnEncoder(const cam_dimension_t max_viewfinder_size,uint32_t width,uint32_t height)9739 bool QCamera3HardwareInterface::isOnEncoder(
9740 const cam_dimension_t max_viewfinder_size,
9741 uint32_t width, uint32_t height)
9742 {
9743 return (width > (uint32_t)max_viewfinder_size.width ||
9744 height > (uint32_t)max_viewfinder_size.height);
9745 }
9746
9747 /*===========================================================================
9748 * FUNCTION : setBundleInfo
9749 *
9750 * DESCRIPTION: Set bundle info for all streams that are bundle.
9751 *
9752 * PARAMETERS : None
9753 *
9754 * RETURN : NO_ERROR on success
9755 * Error codes on failure
9756 *==========================================================================*/
setBundleInfo()9757 int32_t QCamera3HardwareInterface::setBundleInfo()
9758 {
9759 int32_t rc = NO_ERROR;
9760
9761 if (mChannelHandle) {
9762 cam_bundle_config_t bundleInfo;
9763 memset(&bundleInfo, 0, sizeof(bundleInfo));
9764 rc = mCameraHandle->ops->get_bundle_info(
9765 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
9766 if (rc != NO_ERROR) {
9767 ALOGE("%s: get_bundle_info failed", __func__);
9768 return rc;
9769 }
9770 if (mAnalysisChannel) {
9771 mAnalysisChannel->setBundleInfo(bundleInfo);
9772 }
9773 if (mSupportChannel) {
9774 mSupportChannel->setBundleInfo(bundleInfo);
9775 }
9776 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9777 it != mStreamInfo.end(); it++) {
9778 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9779 channel->setBundleInfo(bundleInfo);
9780 }
9781 if (mRawDumpChannel) {
9782 mRawDumpChannel->setBundleInfo(bundleInfo);
9783 }
9784 }
9785
9786 return rc;
9787 }
9788
9789 }; //end namespace qcamera
9790