/* Copyright (c) 2012-2015, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 * * Redistributions of source code must retain the above copyright
7 * notice, this list of conditions and the following disclaimer.
8 * * Redistributions in binary form must reproduce the above
9 * copyright notice, this list of conditions and the following
10 * disclaimer in the documentation and/or other materials provided
11 * with the distribution.
12 * * Neither the name of The Linux Foundation nor the names of its
13 * contributors may be used to endorse or promote products derived
14 * from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29
30 #define ATRACE_TAG ATRACE_TAG_CAMERA
31 #define LOG_TAG "QCamera3HWI"
32 //#define LOG_NDEBUG 0
33
34 #define __STDC_LIMIT_MACROS
35 #include <cutils/properties.h>
36 #include <hardware/camera3.h>
37 #include <camera/CameraMetadata.h>
38 #include <stdio.h>
39 #include <stdlib.h>
40 #include <fcntl.h>
41 #include <stdint.h>
42 #include <utils/Log.h>
43 #include <utils/Errors.h>
44 #include <utils/Trace.h>
45 #include <sync/sync.h>
46 #include <gralloc_priv.h>
47 #include "util/QCameraFlash.h"
48 #include "QCamera3HWI.h"
49 #include "QCamera3Mem.h"
50 #include "QCamera3Channel.h"
51 #include "QCamera3PostProc.h"
52 #include "QCamera3VendorTags.h"
53
54 using namespace android;
55
56 namespace qcamera {
57
58 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
59
60 #define EMPTY_PIPELINE_DELAY 2
61 #define PARTIAL_RESULT_COUNT 3
62 #define FRAME_SKIP_DELAY 0
63 #define CAM_MAX_SYNC_LATENCY 4
64
65 #define MAX_VALUE_8BIT ((1<<8)-1)
66 #define MAX_VALUE_10BIT ((1<<10)-1)
67 #define MAX_VALUE_12BIT ((1<<12)-1)
68
69 #define VIDEO_4K_WIDTH 3840
70 #define VIDEO_4K_HEIGHT 2160
71
72 #define MAX_EIS_WIDTH 1920
73 #define MAX_EIS_HEIGHT 1080
74
75 #define MAX_RAW_STREAMS 1
76 #define MAX_STALLING_STREAMS 1
77 #define MAX_PROCESSED_STREAMS 3
78 /* Batch mode is enabled only if FPS set is equal to or greater than this */
79 #define MIN_FPS_FOR_BATCH_MODE (120)
80 #define PREVIEW_FPS_FOR_HFR (30)
81 #define DEFAULT_VIDEO_FPS (30.0)
82 #define MAX_HFR_BATCH_SIZE (8)
83 #define REGIONS_TUPLE_COUNT 5
84 #define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
85
86 #define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
87
88 #define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
89 CAM_QCOM_FEATURE_CROP |\
90 CAM_QCOM_FEATURE_ROTATION |\
91 CAM_QCOM_FEATURE_SHARPNESS |\
92 CAM_QCOM_FEATURE_SCALE |\
93 CAM_QCOM_FEATURE_CAC |\
94 CAM_QCOM_FEATURE_CDS )
95
96 #define TIMEOUT_NEVER -1
97
// Per-sensor backend capability records, filled in when each camera is probed.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata handed to the framework, one entry per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Serializes access to global HAL state shared across camera instances.
static pthread_mutex_t gCamLock = PTHREAD_MUTEX_INITIALIZER;
// HAL-wide log verbosity level; default 1 (presumably raised via system
// property in getLogLevel() -- confirm against that implementation).
volatile uint32_t gCamHal3LogLevel = 1;
102
// Maps the CDS mode strings accepted from system properties ("On"/"Off"/
// "Auto") to the backend cam_cds_mode enum values.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
108
// Translation table: Android control.effectMode enum -> backend effect enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
122
// Translation table: Android control.awbMode enum -> backend white-balance enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
136
// Translation table: Android control.sceneMode enum -> backend scene enum.
// Note STEADYPHOTO maps onto the backend's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
157
// Translation table: Android control.afMode enum -> backend focus-mode enum.
// AF_MODE_OFF intentionally appears twice: both backend OFF and FIXED lenses
// are reported to the framework as "off" (presumably first match wins when
// mapping HAL -> Android -- confirm against the lookup helper).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
169
// Translation table: Android colorCorrection.aberrationMode enum -> backend
// chromatic-aberration-correction enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};
180
// Translation table: Android control.aeAntibandingMode enum -> backend enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
189
// Translation table: Android control.aeMode enum -> backend flash mode.
// AE OFF and plain ON both disable flash; REDEYE is treated as AUTO flash.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
199
// Translation table: Android flash.mode enum -> backend flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
207
// Translation table: Android statistics.faceDetectMode enum -> backend enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF    },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL   }
};
215
// Translation table: Android lens.info.focusDistanceCalibration enum ->
// backend focus-calibration enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};
226
// Translation table: Android lens.state enum -> backend AF lens-state enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
233
// JPEG thumbnail sizes advertised to the framework, as flattened
// (width, height) pairs. The leading 0,0 entry is the Android-mandated
// "no thumbnail" option for ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};
241
// Translation table: Android sensor.testPatternMode enum -> backend enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
};
251
/* Since there is no one-to-one mapping for all the options, some Android enums
 * are not listed. Also, the order of this list matters: when mapping from HAL
 * to Android the code traverses from lower to higher index, so for HAL values
 * that map to multiple Android values, the first match found is selected.
 */
// Translation table: Android sensor.referenceIlluminant1 enum -> backend AWB
// illuminant enum. Several Android illuminants share one backend value (e.g.
// D50 is used for DAYLIGHT and FINE_WEATHER); ordering determines which
// Android value is reported when mapping backend -> Android (see comment above).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
277
// Translation table: requested high-frame-rate FPS value -> backend HFR mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
289
// camera3_device_ops vtable handed to the framework via mCameraDevice.ops.
// Entries left NULL are not implemented by this HAL (register_stream_buffers
// and get_metadata_vendor_tag_ops -- presumably unused by the HAL3 version
// this device reports; confirm against the camera3.h contract).
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
301
302 /*===========================================================================
303 * FUNCTION : QCamera3HardwareInterface
304 *
305 * DESCRIPTION: constructor of QCamera3HardwareInterface
306 *
307 * PARAMETERS :
308 * @cameraId : camera ID
309 *
310 * RETURN : none
311 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mChannelHandle(0),
      mFirstRequest(false),
      mFirstConfiguration(true),
      mFlush(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mHybridAeEnable(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1)
{
    getLogLevel();
    m_perfLock.lock_init();
    // Populate the hw_device_t header the framework uses to talk to this HAL.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;
    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // No default request templates built yet; constructed lazily on demand.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        CDBG("%s: Raw dump from Camera HAL enabled", __func__);

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // Temporal noise reduction toggles for preview/video; both default on.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "1");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    mPendingBuffersMap.num_buffers = 0;
    mPendingBuffersMap.last_frame_number = -1;
}
397
398 /*===========================================================================
399 * FUNCTION : ~QCamera3HardwareInterface
400 *
401 * DESCRIPTION: destructor of QCamera3HardwareInterface
402 *
403 * PARAMETERS : none
404 *
405 * RETURN : none
406 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    CDBG("%s: E", __func__);
    // Remember now whether the framework still owes us buffer returns; used
    // below both to request a daemon restart and to decide whether to abort.
    bool hasPendingBuffers = (mPendingBuffersMap.num_buffers > 0);

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
    }

    // Second pass: now that everything is stopped it is safe to delete.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // mPictureChannel is owned via mStreamInfo (deleted above); just clear it.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            clear_metadata_buffer(mParameters);

            // Check if there is still pending buffer not yet returned.
            if (hasPendingBuffers) {
                for (auto& pendingBuffer : mPendingBuffersMap.mPendingBufferList) {
                    ALOGE("%s: Buffer not yet returned for stream. Frame number %d, format 0x%x, width %d, height %d",
                            __func__, pendingBuffer.frame_number, pendingBuffer.stream->format, pendingBuffer.stream->width,
                            pendingBuffer.stream->height);
                }
                ALOGE("%s: Last requested frame number is %d", __func__, mPendingBuffersMap.last_frame_number);
                // Ask the backend daemon to restart so leaked buffers are reclaimed.
                uint8_t restart = TRUE;
                ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_DAEMON_RESTART,
                        restart);
            }

            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);

            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                ALOGE("%s: set_parms failed for unconfigure", __func__);
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        ALOGE("%s: deleting channel %d", __func__, mChannelHandle);
        mChannelHandle = 0;
    }

    if (mCameraOpened)
        closeCamera();

    // Release bookkeeping for requests/buffers and cached default templates.
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);

    // Deliberate hard exit: un-returned buffers mean the daemon holds stale
    // state, so the process is killed after notifying the daemon to restart.
    if (hasPendingBuffers) {
        ALOGE("%s: Not all buffers were returned. Notified the camera daemon process to restart."
                " Exiting here...", __func__);
        exit(EXIT_FAILURE);
    }
    CDBG("%s: X", __func__);
}
547
548 /*===========================================================================
549 * FUNCTION : erasePendingRequest
550 *
551 * DESCRIPTION: function to erase a desired pending request after freeing any
552 * allocated memory
553 *
554 * PARAMETERS :
555 * @i : iterator pointing to pending request to be erased
556 *
557 * RETURN : iterator pointing to the next request
558 *==========================================================================*/
559 QCamera3HardwareInterface::pendingRequestIterator
erasePendingRequest(pendingRequestIterator i)560 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
561 {
562 if (i->input_buffer != NULL) {
563 free(i->input_buffer);
564 i->input_buffer = NULL;
565 }
566 if (i->settings != NULL)
567 free_camera_metadata((camera_metadata_t*)i->settings);
568 return mPendingRequestsList.erase(i);
569 }
570
571 /*===========================================================================
572 * FUNCTION : camEvtHandle
573 *
574 * DESCRIPTION: Function registered to mm-camera-interface to handle events
575 *
576 * PARAMETERS :
577 * @camera_handle : interface layer camera handle
578 * @evt : ptr to event
579 * @user_data : user data ptr
580 *
581 * RETURN : none
582 *==========================================================================*/
camEvtHandle(uint32_t,mm_camera_event_t * evt,void * user_data)583 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
584 mm_camera_event_t *evt,
585 void *user_data)
586 {
587 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
588 if (obj && evt) {
589 switch(evt->server_event_type) {
590 case CAM_EVENT_TYPE_DAEMON_DIED:
591 ALOGE("%s: Fatal, camera daemon died", __func__);
592 //close the camera backend
593 if (obj->mCameraHandle && obj->mCameraHandle->camera_handle
594 && obj->mCameraHandle->ops) {
595 obj->mCameraHandle->ops->error_close_camera(obj->mCameraHandle->camera_handle);
596 } else {
597 ALOGE("%s: Could not close camera on error because the handle or ops is NULL",
598 __func__);
599 }
600 camera3_notify_msg_t notify_msg;
601 memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t));
602 notify_msg.type = CAMERA3_MSG_ERROR;
603 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
604 notify_msg.message.error.error_stream = NULL;
605 notify_msg.message.error.frame_number = 0;
606 obj->mCallbackOps->notify(obj->mCallbackOps, ¬ify_msg);
607 break;
608
609 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
610 CDBG("%s: HAL got request pull from Daemon", __func__);
611 pthread_mutex_lock(&obj->mMutex);
612 obj->mWokenUpByDaemon = true;
613 obj->unblockRequestIfNecessary();
614 pthread_mutex_unlock(&obj->mMutex);
615 break;
616
617 default:
618 CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
619 evt->server_event_type);
620 break;
621 }
622 } else {
623 ALOGE("%s: NULL user_data/evt", __func__);
624 }
625 }
626
627 /*===========================================================================
628 * FUNCTION : openCamera
629 *
630 * DESCRIPTION: open camera
631 *
632 * PARAMETERS :
633 * @hw_device : double ptr for camera device struct
634 *
635 * RETURN : int32_t type of status
636 * NO_ERROR -- success
637 * none-zero failure code
638 *==========================================================================*/
openCamera(struct hw_device_t ** hw_device)639 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
640 {
641 int rc = 0;
642 if (mCameraOpened) {
643 *hw_device = NULL;
644 return PERMISSION_DENIED;
645 }
646 m_perfLock.lock_acq();
647 rc = openCamera();
648 if (rc == 0) {
649 *hw_device = &mCameraDevice.common;
650 } else
651 *hw_device = NULL;
652
653 m_perfLock.lock_rel();
654 return rc;
655 }
656
657 /*===========================================================================
658 * FUNCTION : openCamera
659 *
660 * DESCRIPTION: open camera
661 *
662 * PARAMETERS : none
663 *
664 * RETURN : int32_t type of status
665 * NO_ERROR -- success
666 * none-zero failure code
667 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;

    ATRACE_CALL();
    // A non-NULL handle means a previous open already succeeded.
    if (mCameraHandle) {
        ALOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    // Claim the flash unit first so the torch cannot be grabbed while the
    // camera session is live.
    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        ALOGE("%s: Failed to reserve flash for camera id: %d",
                __func__,
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        ALOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    mCameraOpened = true;

    // Register for daemon events (death, request pull) on this session.
    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        ALOGE("%s: Error, failed to register event callback", __func__);
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }
    mFirstConfiguration = true;
    return NO_ERROR;
}
705
706 /*===========================================================================
707 * FUNCTION : closeCamera
708 *
709 * DESCRIPTION: close camera
710 *
711 * PARAMETERS : none
712 *
713 * RETURN : int32_t type of status
714 * NO_ERROR -- success
715 * none-zero failure code
716 *==========================================================================*/
closeCamera()717 int QCamera3HardwareInterface::closeCamera()
718 {
719 ATRACE_CALL();
720 int rc = NO_ERROR;
721
722 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
723 mCameraHandle = NULL;
724 mCameraOpened = false;
725
726 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
727 CDBG("%s: Failed to release flash for camera id: %d",
728 __func__,
729 mCameraId);
730 }
731
732 return rc;
733 }
734
735 /*===========================================================================
736 * FUNCTION : initialize
737 *
738 * DESCRIPTION: Initialize frameworks callback functions
739 *
740 * PARAMETERS :
741 * @callback_ops : callback function to frameworks
742 *
743 * RETURN :
744 *
745 *==========================================================================*/
initialize(const struct camera3_callback_ops * callback_ops)746 int QCamera3HardwareInterface::initialize(
747 const struct camera3_callback_ops *callback_ops)
748 {
749 ATRACE_CALL();
750 int rc;
751
752 pthread_mutex_lock(&mMutex);
753
754 rc = initParameters();
755 if (rc < 0) {
756 ALOGE("%s: initParamters failed %d", __func__, rc);
757 goto err1;
758 }
759 mCallbackOps = callback_ops;
760
761 mChannelHandle = mCameraHandle->ops->add_channel(
762 mCameraHandle->camera_handle, NULL, NULL, this);
763 if (mChannelHandle == 0) {
764 ALOGE("%s: add_channel failed", __func__);
765 rc = -ENOMEM;
766 pthread_mutex_unlock(&mMutex);
767 return rc;
768 }
769
770 pthread_mutex_unlock(&mMutex);
771 mCameraInitialized = true;
772 return 0;
773
774 err1:
775 pthread_mutex_unlock(&mMutex);
776 return rc;
777 }
778
779 /*===========================================================================
780 * FUNCTION : validateStreamDimensions
781 *
782 * DESCRIPTION: Check if the configuration requested are those advertised
783 *
784 * PARAMETERS :
785 * @stream_list : streams to be configured
786 *
787 * RETURN :
788 *
789 *==========================================================================*/
validateStreamDimensions(camera3_stream_configuration_t * streamList)790 int QCamera3HardwareInterface::validateStreamDimensions(
791 camera3_stream_configuration_t *streamList)
792 {
793 int rc = NO_ERROR;
794 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
795 int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
796 size_t count = 0;
797
798 camera3_stream_t *inputStream = NULL;
799 /*
800 * Loop through all streams to find input stream if it exists*
801 */
802 for (size_t i = 0; i< streamList->num_streams; i++) {
803 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
804 if (inputStream != NULL) {
805 ALOGE("%s: Error, Multiple input streams requested");
806 return -EINVAL;
807 }
808 inputStream = streamList->streams[i];
809 }
810 }
811 /*
812 * Loop through all streams requested in configuration
813 * Check if unsupported sizes have been requested on any of them
814 */
815 for (size_t j = 0; j < streamList->num_streams; j++) {
816 bool sizeFound = false;
817 size_t jpeg_sizes_cnt = 0;
818 camera3_stream_t *newStream = streamList->streams[j];
819
820 uint32_t rotatedHeight = newStream->height;
821 uint32_t rotatedWidth = newStream->width;
822 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
823 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
824 rotatedHeight = newStream->width;
825 rotatedWidth = newStream->height;
826 }
827
828 /*
829 * Sizes are different for each type of stream format check against
830 * appropriate table.
831 */
832 switch (newStream->format) {
833 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
834 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
835 case HAL_PIXEL_FORMAT_RAW10:
836 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
837 for (size_t i = 0; i < count; i++) {
838 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
839 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
840 sizeFound = true;
841 break;
842 }
843 }
844 break;
845 case HAL_PIXEL_FORMAT_BLOB:
846 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
847 /* Generate JPEG sizes table */
848 makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
849 count,
850 MAX_SIZES_CNT,
851 available_processed_sizes);
852 jpeg_sizes_cnt = filterJpegSizes(
853 available_jpeg_sizes,
854 available_processed_sizes,
855 count * 2,
856 MAX_SIZES_CNT * 2,
857 gCamCapability[mCameraId]->active_array_size,
858 gCamCapability[mCameraId]->max_downscale_factor);
859
860 /* Verify set size against generated sizes table */
861 for (size_t i = 0; i < (jpeg_sizes_cnt / 2); i++) {
862 if (((int32_t)rotatedWidth == available_jpeg_sizes[i*2]) &&
863 ((int32_t)rotatedHeight == available_jpeg_sizes[i*2+1])) {
864 sizeFound = true;
865 break;
866 }
867 }
868 break;
869 case HAL_PIXEL_FORMAT_YCbCr_420_888:
870 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
871 default:
872 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
873 || newStream->stream_type == CAMERA3_STREAM_INPUT
874 || IS_USAGE_ZSL(newStream->usage)) {
875 if (((int32_t)rotatedWidth ==
876 gCamCapability[mCameraId]->active_array_size.width) &&
877 ((int32_t)rotatedHeight ==
878 gCamCapability[mCameraId]->active_array_size.height)) {
879 sizeFound = true;
880 break;
881 }
882 /* We could potentially break here to enforce ZSL stream
883 * set from frameworks always is full active array size
884 * but it is not clear from the spc if framework will always
885 * follow that, also we have logic to override to full array
886 * size, so keeping the logic lenient at the moment
887 */
888 }
889 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
890 MAX_SIZES_CNT);
891 for (size_t i = 0; i < count; i++) {
892 if (((int32_t)rotatedWidth ==
893 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
894 ((int32_t)rotatedHeight ==
895 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
896 sizeFound = true;
897 break;
898 }
899 }
900 break;
901 } /* End of switch(newStream->format) */
902
903 /* We error out even if a single stream has unsupported size set */
904 if (!sizeFound) {
905 ALOGE("%s: Error: Unsupported size of %d x %d requested for stream"
906 "type:%d", __func__, rotatedWidth, rotatedHeight,
907 newStream->format);
908 ALOGE("%s: Active array size is %d x %d", __func__,
909 gCamCapability[mCameraId]->active_array_size.width,
910 gCamCapability[mCameraId]->active_array_size.height);
911 rc = -EINVAL;
912 break;
913 }
914 } /* End of for each stream */
915 return rc;
916 }
917
918 /*==============================================================================
919 * FUNCTION : isSupportChannelNeeded
920 *
921 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
922 *
923 * PARAMETERS :
924 * @stream_list : streams to be configured
925 * @stream_config_info : the config info for streams to be configured
926 *
927 * RETURN : Boolean true/false decision
928 *
929 *==========================================================================*/
isSupportChannelNeeded(camera3_stream_configuration_t * streamList,cam_stream_size_info_t stream_config_info)930 bool QCamera3HardwareInterface::isSupportChannelNeeded(
931 camera3_stream_configuration_t *streamList,
932 cam_stream_size_info_t stream_config_info)
933 {
934 uint32_t i;
935 bool pprocRequested = false;
936 /* Check for conditions where PProc pipeline does not have any streams*/
937 for (i = 0; i < stream_config_info.num_streams; i++) {
938 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
939 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
940 pprocRequested = true;
941 break;
942 }
943 }
944
945 if (pprocRequested == false )
946 return true;
947
948 /* Dummy stream needed if only raw or jpeg streams present */
949 for (i = 0; i < streamList->num_streams; i++) {
950 switch(streamList->streams[i]->format) {
951 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
952 case HAL_PIXEL_FORMAT_RAW10:
953 case HAL_PIXEL_FORMAT_RAW16:
954 case HAL_PIXEL_FORMAT_BLOB:
955 break;
956 default:
957 return false;
958 }
959 }
960 return true;
961 }
962
963 /*==============================================================================
964 * FUNCTION : getSensorOutputSize
965 *
966 * DESCRIPTION: Get sensor output size based on current stream configuration
967 *
968 * PARAMETERS :
969 * @sensor_dim : sensor output dimension (output)
970 *
971 * RETURN : int32_t type of status
972 * NO_ERROR -- success
973 * none-zero failure code
974 *
975 *==========================================================================*/
getSensorOutputSize(cam_dimension_t & sensor_dim)976 int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
977 {
978 int32_t rc = NO_ERROR;
979
980 cam_dimension_t max_dim = {0, 0};
981 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
982 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
983 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
984 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
985 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
986 }
987
988 clear_metadata_buffer(mParameters);
989
990 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
991 max_dim);
992 if (rc != NO_ERROR) {
993 ALOGE("%s:Failed to update table for CAM_INTF_PARM_MAX_DIMENSION", __func__);
994 return rc;
995 }
996
997 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
998 if (rc != NO_ERROR) {
999 ALOGE("%s: Failed to set CAM_INTF_PARM_MAX_DIMENSION", __func__);
1000 return rc;
1001 }
1002
1003 clear_metadata_buffer(mParameters);
1004 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1005
1006 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1007 mParameters);
1008 if (rc != NO_ERROR) {
1009 ALOGE("%s: Failed to get CAM_INTF_PARM_RAW_DIMENSION", __func__);
1010 return rc;
1011 }
1012
1013 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1014 ALOGI("%s: sensor output dimension = %d x %d", __func__, sensor_dim.width, sensor_dim.height);
1015
1016 return rc;
1017 }
1018
1019 /*==============================================================================
1020 * FUNCTION : enablePowerHint
1021 *
1022 * DESCRIPTION: enable single powerhint for preview and different video modes.
1023 *
1024 * PARAMETERS :
1025 *
1026 * RETURN : NULL
1027 *
1028 *==========================================================================*/
enablePowerHint()1029 void QCamera3HardwareInterface::enablePowerHint()
1030 {
1031 if (!mPowerHintEnabled) {
1032 m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 1);
1033 mPowerHintEnabled = true;
1034 }
1035 }
1036
1037 /*==============================================================================
1038 * FUNCTION : disablePowerHint
1039 *
1040 * DESCRIPTION: disable current powerhint.
1041 *
1042 * PARAMETERS :
1043 *
1044 * RETURN : NULL
1045 *
1046 *==========================================================================*/
disablePowerHint()1047 void QCamera3HardwareInterface::disablePowerHint()
1048 {
1049 if (mPowerHintEnabled) {
1050 m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 0);
1051 mPowerHintEnabled = false;
1052 }
1053 }
1054
1055 /*===========================================================================
1056 * FUNCTION : configureStreams
1057 *
1058 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1059 * and output streams.
1060 *
1061 * PARAMETERS :
1062 * @stream_list : streams to be configured
1063 *
1064 * RETURN :
1065 *
1066 *==========================================================================*/
configureStreams(camera3_stream_configuration_t * streamList)1067 int QCamera3HardwareInterface::configureStreams(
1068 camera3_stream_configuration_t *streamList)
1069 {
1070 ATRACE_CALL();
1071 int rc = 0;
1072
1073 // Acquire perfLock before configure streams
1074 m_perfLock.lock_acq();
1075 rc = configureStreamsPerfLocked(streamList);
1076 m_perfLock.lock_rel();
1077
1078 return rc;
1079 }
1080
1081 /*===========================================================================
1082 * FUNCTION : configureStreamsPerfLocked
1083 *
1084 * DESCRIPTION: configureStreams while perfLock is held.
1085 *
1086 * PARAMETERS :
1087 * @stream_list : streams to be configured
1088 *
1089 * RETURN : int32_t type of status
1090 * NO_ERROR -- success
1091 * none-zero failure code
1092 *==========================================================================*/
configureStreamsPerfLocked(camera3_stream_configuration_t * streamList)1093 int QCamera3HardwareInterface::configureStreamsPerfLocked(
1094 camera3_stream_configuration_t *streamList)
1095 {
1096 ATRACE_CALL();
1097 int rc = 0;
1098
1099 // Sanity check stream_list
1100 if (streamList == NULL) {
1101 ALOGE("%s: NULL stream configuration", __func__);
1102 return BAD_VALUE;
1103 }
1104 if (streamList->streams == NULL) {
1105 ALOGE("%s: NULL stream list", __func__);
1106 return BAD_VALUE;
1107 }
1108
1109 if (streamList->num_streams < 1) {
1110 ALOGE("%s: Bad number of streams requested: %d", __func__,
1111 streamList->num_streams);
1112 return BAD_VALUE;
1113 }
1114
1115 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1116 ALOGE("%s: Maximum number of streams %d exceeded: %d", __func__,
1117 MAX_NUM_STREAMS, streamList->num_streams);
1118 return BAD_VALUE;
1119 }
1120
1121 mOpMode = streamList->operation_mode;
1122 CDBG("%s: mOpMode: %d", __func__, mOpMode);
1123
1124 /* first invalidate all the steams in the mStreamList
1125 * if they appear again, they will be validated */
1126 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1127 it != mStreamInfo.end(); it++) {
1128 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1129 if (channel) {
1130 channel->stop();
1131 }
1132 (*it)->status = INVALID;
1133 }
1134
1135 if (mRawDumpChannel) {
1136 mRawDumpChannel->stop();
1137 delete mRawDumpChannel;
1138 mRawDumpChannel = NULL;
1139 }
1140
1141 if (mSupportChannel)
1142 mSupportChannel->stop();
1143
1144 if (mAnalysisChannel) {
1145 mAnalysisChannel->stop();
1146 }
1147 if (mMetadataChannel) {
1148 /* If content of mStreamInfo is not 0, there is metadata stream */
1149 mMetadataChannel->stop();
1150 }
1151 if (mChannelHandle) {
1152 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1153 mChannelHandle);
1154 ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
1155 }
1156
1157 pthread_mutex_lock(&mMutex);
1158
1159 /* Check whether we have video stream */
1160 m_bIs4KVideo = false;
1161 m_bIsVideo = false;
1162 m_bEisSupportedSize = false;
1163 m_bTnrEnabled = false;
1164 bool isZsl = false;
1165 uint32_t videoWidth = 0U;
1166 uint32_t videoHeight = 0U;
1167 size_t rawStreamCnt = 0;
1168 size_t stallStreamCnt = 0;
1169 size_t processedStreamCnt = 0;
1170 // Number of streams on ISP encoder path
1171 size_t numStreamsOnEncoder = 0;
1172 size_t numYuv888OnEncoder = 0;
1173 bool bYuv888OverrideJpeg = false;
1174 cam_dimension_t largeYuv888Size = {0, 0};
1175 cam_dimension_t maxViewfinderSize = {0, 0};
1176 bool bJpegExceeds4K = false;
1177 bool bJpegOnEncoder = false;
1178 bool bUseCommonFeatureMask = false;
1179 uint32_t commonFeatureMask = 0;
1180 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1181 camera3_stream_t *inputStream = NULL;
1182 bool isJpeg = false;
1183 cam_dimension_t jpegSize = {0, 0};
1184
1185 /*EIS configuration*/
1186 bool eisSupported = false;
1187 bool oisSupported = false;
1188 int32_t margin_index = -1;
1189 uint8_t eis_prop_set;
1190 uint32_t maxEisWidth = 0;
1191 uint32_t maxEisHeight = 0;
1192 int32_t hal_version = CAM_HAL_V3;
1193
1194 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1195
1196 size_t count = IS_TYPE_MAX;
1197 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1198 for (size_t i = 0; i < count; i++) {
1199 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1200 eisSupported = true;
1201 margin_index = (int32_t)i;
1202 break;
1203 }
1204 }
1205
1206 count = CAM_OPT_STAB_MAX;
1207 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1208 for (size_t i = 0; i < count; i++) {
1209 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1210 oisSupported = true;
1211 break;
1212 }
1213 }
1214
1215 if (eisSupported) {
1216 maxEisWidth = MAX_EIS_WIDTH;
1217 maxEisHeight = MAX_EIS_HEIGHT;
1218 }
1219
1220 /* EIS setprop control */
1221 char eis_prop[PROPERTY_VALUE_MAX];
1222 memset(eis_prop, 0, sizeof(eis_prop));
1223 property_get("persist.camera.eis.enable", eis_prop, "0");
1224 eis_prop_set = (uint8_t)atoi(eis_prop);
1225
1226 m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1227 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1228
1229 /* stream configurations */
1230 for (size_t i = 0; i < streamList->num_streams; i++) {
1231 camera3_stream_t *newStream = streamList->streams[i];
1232 ALOGI("%s: stream[%d] type = %d, format = %d, width = %d, "
1233 "height = %d, rotation = %d, usage = 0x%x",
1234 __func__, i, newStream->stream_type, newStream->format,
1235 newStream->width, newStream->height, newStream->rotation,
1236 newStream->usage);
1237 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1238 newStream->stream_type == CAMERA3_STREAM_INPUT){
1239 isZsl = true;
1240 }
1241 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1242 inputStream = newStream;
1243 }
1244
1245 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1246 isJpeg = true;
1247 jpegSize.width = newStream->width;
1248 jpegSize.height = newStream->height;
1249 if (newStream->width > VIDEO_4K_WIDTH ||
1250 newStream->height > VIDEO_4K_HEIGHT)
1251 bJpegExceeds4K = true;
1252 }
1253
1254 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1255 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1256 m_bIsVideo = true;
1257 videoWidth = newStream->width;
1258 videoHeight = newStream->height;
1259 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1260 (VIDEO_4K_HEIGHT <= newStream->height)) {
1261 m_bIs4KVideo = true;
1262 }
1263 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1264 (newStream->height <= maxEisHeight);
1265 }
1266 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1267 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1268 switch (newStream->format) {
1269 case HAL_PIXEL_FORMAT_BLOB:
1270 stallStreamCnt++;
1271 if (isOnEncoder(maxViewfinderSize, newStream->width,
1272 newStream->height)) {
1273 commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1274 numStreamsOnEncoder++;
1275 bJpegOnEncoder = true;
1276 }
1277 break;
1278 case HAL_PIXEL_FORMAT_RAW10:
1279 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1280 case HAL_PIXEL_FORMAT_RAW16:
1281 rawStreamCnt++;
1282 break;
1283 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1284 processedStreamCnt++;
1285 if (isOnEncoder(maxViewfinderSize, newStream->width,
1286 newStream->height)) {
1287 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1288 IS_USAGE_ZSL(newStream->usage)) {
1289 commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1290 } else {
1291 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1292 }
1293 numStreamsOnEncoder++;
1294 }
1295 break;
1296 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1297 processedStreamCnt++;
1298 if (isOnEncoder(maxViewfinderSize, newStream->width,
1299 newStream->height)) {
1300 // If Yuv888 size is not greater than 4K, set feature mask
1301 // to SUPERSET so that it support concurrent request on
1302 // YUV and JPEG.
1303 if (newStream->width <= VIDEO_4K_WIDTH &&
1304 newStream->height <= VIDEO_4K_HEIGHT) {
1305 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1306 } else {
1307 commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1308 }
1309 numStreamsOnEncoder++;
1310 numYuv888OnEncoder++;
1311 largeYuv888Size.width = newStream->width;
1312 largeYuv888Size.height = newStream->height;
1313 }
1314 break;
1315 default:
1316 processedStreamCnt++;
1317 if (isOnEncoder(maxViewfinderSize, newStream->width,
1318 newStream->height)) {
1319 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1320 numStreamsOnEncoder++;
1321 }
1322 break;
1323 }
1324
1325 }
1326 }
1327
1328 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1329 !m_bIsVideo) {
1330 m_bEisEnable = false;
1331 }
1332
1333 /* Logic to enable/disable TNR based on specific config size/etc.*/
1334 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1335 ((videoWidth == 1920 && videoHeight == 1080) ||
1336 (videoWidth == 1280 && videoHeight == 720)) &&
1337 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1338 m_bTnrEnabled = true;
1339
1340 /* Check if num_streams is sane */
1341 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1342 rawStreamCnt > MAX_RAW_STREAMS ||
1343 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1344 ALOGE("%s: Invalid stream configu: stall: %d, raw: %d, processed %d",
1345 __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
1346 pthread_mutex_unlock(&mMutex);
1347 return -EINVAL;
1348 }
1349 /* Check whether we have zsl stream or 4k video case */
1350 if (isZsl && m_bIsVideo) {
1351 ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
1352 pthread_mutex_unlock(&mMutex);
1353 return -EINVAL;
1354 }
1355 /* Check if stream sizes are sane */
1356 if (numStreamsOnEncoder > 2) {
1357 ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
1358 __func__);
1359 pthread_mutex_unlock(&mMutex);
1360 return -EINVAL;
1361 } else if (1 < numStreamsOnEncoder){
1362 bUseCommonFeatureMask = true;
1363 CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed",
1364 __func__);
1365 }
1366
1367 /* Check if BLOB size is greater than 4k in 4k recording case */
1368 if (m_bIs4KVideo && bJpegExceeds4K) {
1369 ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
1370 __func__);
1371 pthread_mutex_unlock(&mMutex);
1372 return -EINVAL;
1373 }
1374
1375 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1376 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1377 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1378 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1379 // configurations:
1380 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1381 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1382 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1383 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1384 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1385 __func__);
1386 pthread_mutex_unlock(&mMutex);
1387 return -EINVAL;
1388 }
1389
1390 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1391 // the YUV stream's size is greater or equal to the JPEG size, set common
1392 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1393 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1394 jpegSize.width, jpegSize.height) &&
1395 largeYuv888Size.width > jpegSize.width &&
1396 largeYuv888Size.height > jpegSize.height) {
1397 bYuv888OverrideJpeg = true;
1398 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1399 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1400 }
1401
1402 rc = validateStreamDimensions(streamList);
1403 if (rc == NO_ERROR) {
1404 rc = validateStreamRotations(streamList);
1405 }
1406 if (rc != NO_ERROR) {
1407 ALOGE("%s: Invalid stream configuration requested!", __func__);
1408 pthread_mutex_unlock(&mMutex);
1409 return rc;
1410 }
1411
1412 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1413 camera3_stream_t *jpegStream = NULL;
1414 for (size_t i = 0; i < streamList->num_streams; i++) {
1415 camera3_stream_t *newStream = streamList->streams[i];
1416 CDBG_HIGH("%s: newStream type = %d, stream format = %d "
1417 "stream size : %d x %d, stream rotation = %d",
1418 __func__, newStream->stream_type, newStream->format,
1419 newStream->width, newStream->height, newStream->rotation);
1420 //if the stream is in the mStreamList validate it
1421 bool stream_exists = false;
1422 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1423 it != mStreamInfo.end(); it++) {
1424 if ((*it)->stream == newStream) {
1425 QCamera3ProcessingChannel *channel =
1426 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1427 stream_exists = true;
1428 if (channel)
1429 delete channel;
1430 (*it)->status = VALID;
1431 (*it)->stream->priv = NULL;
1432 (*it)->channel = NULL;
1433 }
1434 }
1435 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1436 //new stream
1437 stream_info_t* stream_info;
1438 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1439 if (!stream_info) {
1440 ALOGE("%s: Could not allocate stream info", __func__);
1441 rc = -ENOMEM;
1442 pthread_mutex_unlock(&mMutex);
1443 return rc;
1444 }
1445 stream_info->stream = newStream;
1446 stream_info->status = VALID;
1447 stream_info->channel = NULL;
1448 mStreamInfo.push_back(stream_info);
1449 }
1450 /* Covers Opaque ZSL and API1 F/W ZSL */
1451 if (IS_USAGE_ZSL(newStream->usage)
1452 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1453 if (zslStream != NULL) {
1454 ALOGE("%s: Multiple input/reprocess streams requested!", __func__);
1455 pthread_mutex_unlock(&mMutex);
1456 return BAD_VALUE;
1457 }
1458 zslStream = newStream;
1459 }
1460 /* Covers YUV reprocess */
1461 if (inputStream != NULL) {
1462 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1463 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1464 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1465 && inputStream->width == newStream->width
1466 && inputStream->height == newStream->height) {
1467 if (zslStream != NULL) {
1468 /* This scenario indicates multiple YUV streams with same size
1469 * as input stream have been requested, since zsl stream handle
1470 * is solely use for the purpose of overriding the size of streams
1471 * which share h/w streams we will just make a guess here as to
1472 * which of the stream is a ZSL stream, this will be refactored
1473 * once we make generic logic for streams sharing encoder output
1474 */
1475 CDBG_HIGH("%s: Warning, Multiple ip/reprocess streams requested!", __func__);
1476 }
1477 zslStream = newStream;
1478 }
1479 }
1480 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1481 jpegStream = newStream;
1482 }
1483 }
1484
1485 /* If a zsl stream is set, we know that we have configured at least one input or
1486 bidirectional stream */
1487 if (NULL != zslStream) {
1488 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1489 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1490 mInputStreamInfo.format = zslStream->format;
1491 mInputStreamInfo.usage = zslStream->usage;
1492 CDBG("%s: Input stream configured! %d x %d, format %d, usage %d",
1493 __func__, mInputStreamInfo.dim.width,
1494 mInputStreamInfo.dim.height,
1495 mInputStreamInfo.format, mInputStreamInfo.usage);
1496 }
1497
1498 cleanAndSortStreamInfo();
1499 if (mMetadataChannel) {
1500 delete mMetadataChannel;
1501 mMetadataChannel = NULL;
1502 }
1503 if (mSupportChannel) {
1504 delete mSupportChannel;
1505 mSupportChannel = NULL;
1506 }
1507
1508 if (mAnalysisChannel) {
1509 delete mAnalysisChannel;
1510 mAnalysisChannel = NULL;
1511 }
1512
1513 if (mDummyBatchChannel) {
1514 delete mDummyBatchChannel;
1515 mDummyBatchChannel = NULL;
1516 }
1517
1518 //Create metadata channel and initialize it
1519 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1520 mChannelHandle, mCameraHandle->ops, captureResultCb,
1521 &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
1522 if (mMetadataChannel == NULL) {
1523 ALOGE("%s: failed to allocate metadata channel", __func__);
1524 rc = -ENOMEM;
1525 pthread_mutex_unlock(&mMutex);
1526 return rc;
1527 }
1528 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1529 if (rc < 0) {
1530 ALOGE("%s: metadata channel initialization failed", __func__);
1531 delete mMetadataChannel;
1532 mMetadataChannel = NULL;
1533 pthread_mutex_unlock(&mMutex);
1534 return rc;
1535 }
1536
1537 // Create analysis stream all the time, even when h/w support is not available
1538 {
1539 mAnalysisChannel = new QCamera3SupportChannel(
1540 mCameraHandle->camera_handle,
1541 mChannelHandle,
1542 mCameraHandle->ops,
1543 &gCamCapability[mCameraId]->padding_info,
1544 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1545 CAM_STREAM_TYPE_ANALYSIS,
1546 &gCamCapability[mCameraId]->analysis_recommended_res,
1547 gCamCapability[mCameraId]->analysis_recommended_format,
1548 this,
1549 0); // force buffer count to 0
1550 if (!mAnalysisChannel) {
1551 ALOGE("%s: H/W Analysis channel cannot be created", __func__);
1552 pthread_mutex_unlock(&mMutex);
1553 return -ENOMEM;
1554 }
1555 }
1556
1557 bool isRawStreamRequested = false;
1558 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1559 /* Allocate channel objects for the requested streams */
1560 for (size_t i = 0; i < streamList->num_streams; i++) {
1561 camera3_stream_t *newStream = streamList->streams[i];
1562 uint32_t stream_usage = newStream->usage;
1563 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1564 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1565 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1566 || IS_USAGE_ZSL(newStream->usage)) &&
1567 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1568 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1569 if (bUseCommonFeatureMask) {
1570 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1571 commonFeatureMask;
1572 } else {
1573 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1574 CAM_QCOM_FEATURE_NONE;
1575 }
1576
1577 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1578 CDBG_HIGH("%s: Input stream configured, reprocess config", __func__);
1579 } else {
1580 //for non zsl streams find out the format
1581 switch (newStream->format) {
1582 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1583 {
1584 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1585 = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1586
1587 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1588
1589 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_VIDEO;
1590 if (m_bTnrEnabled && m_bTnrVideo) {
1591 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1592 CAM_QCOM_FEATURE_CPP_TNR;
1593 }
1594
1595 } else {
1596
1597 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_PREVIEW;
1598 if (m_bTnrEnabled && m_bTnrPreview) {
1599 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1600 CAM_QCOM_FEATURE_CPP_TNR;
1601 }
1602 }
1603
1604 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1605 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1606 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1607 newStream->height;
1608 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1609 newStream->width;
1610 }
1611 }
1612 break;
1613 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1614 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1615 if (isOnEncoder(maxViewfinderSize, newStream->width,
1616 newStream->height)) {
1617 if (bUseCommonFeatureMask)
1618 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1619 commonFeatureMask;
1620 else
1621 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1622 CAM_QCOM_FEATURE_NONE;
1623 } else {
1624 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1625 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1626 }
1627 break;
1628 case HAL_PIXEL_FORMAT_BLOB:
1629 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1630 if (m_bIs4KVideo && !isZsl) {
1631 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1632 = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1633 } else {
1634 if (bUseCommonFeatureMask &&
1635 isOnEncoder(maxViewfinderSize, newStream->width,
1636 newStream->height)) {
1637 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1638 } else {
1639 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1640 }
1641 }
1642 if (isZsl) {
1643 if (zslStream) {
1644 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1645 (int32_t)zslStream->width;
1646 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1647 (int32_t)zslStream->height;
1648 } else {
1649 ALOGE("%s: Error, No ZSL stream identified",__func__);
1650 pthread_mutex_unlock(&mMutex);
1651 return -EINVAL;
1652 }
1653 } else if (m_bIs4KVideo) {
1654 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1655 (int32_t)videoWidth;
1656 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1657 (int32_t)videoHeight;
1658 } else if (bYuv888OverrideJpeg) {
1659 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1660 (int32_t)largeYuv888Size.width;
1661 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1662 (int32_t)largeYuv888Size.height;
1663 }
1664 break;
1665 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1666 case HAL_PIXEL_FORMAT_RAW16:
1667 case HAL_PIXEL_FORMAT_RAW10:
1668 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1669 isRawStreamRequested = true;
1670 break;
1671 default:
1672 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1673 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1674 break;
1675 }
1676
1677 }
1678
1679 if (newStream->priv == NULL) {
1680 //New stream, construct channel
1681 switch (newStream->stream_type) {
1682 case CAMERA3_STREAM_INPUT:
1683 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1684 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1685 break;
1686 case CAMERA3_STREAM_BIDIRECTIONAL:
1687 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1688 GRALLOC_USAGE_HW_CAMERA_WRITE;
1689 break;
1690 case CAMERA3_STREAM_OUTPUT:
1691 /* For video encoding stream, set read/write rarely
1692 * flag so that they may be set to un-cached */
1693 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1694 newStream->usage |=
1695 (GRALLOC_USAGE_SW_READ_RARELY |
1696 GRALLOC_USAGE_SW_WRITE_RARELY |
1697 GRALLOC_USAGE_HW_CAMERA_WRITE);
1698 else if (IS_USAGE_ZSL(newStream->usage))
1699 CDBG("%s: ZSL usage flag skipping", __func__);
1700 else if (newStream == zslStream
1701 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1702 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1703 } else
1704 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1705 break;
1706 default:
1707 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
1708 break;
1709 }
1710
1711 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1712 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1713 QCamera3ProcessingChannel *channel = NULL;
1714 switch (newStream->format) {
1715 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1716 if ((newStream->usage &
1717 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1718 (streamList->operation_mode ==
1719 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1720 ) {
1721 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1722 mChannelHandle, mCameraHandle->ops, captureResultCb,
1723 &gCamCapability[mCameraId]->padding_info,
1724 this,
1725 newStream,
1726 (cam_stream_type_t)
1727 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1728 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1729 mMetadataChannel,
1730 0); //heap buffers are not required for HFR video channel
1731 if (channel == NULL) {
1732 ALOGE("%s: allocation of channel failed", __func__);
1733 pthread_mutex_unlock(&mMutex);
1734 return -ENOMEM;
1735 }
1736 //channel->getNumBuffers() will return 0 here so use
1737 //MAX_INFLIGH_HFR_REQUESTS
1738 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
1739 newStream->priv = channel;
1740 ALOGI("%s: num video buffers in HFR mode: %d",
1741 __func__, MAX_INFLIGHT_HFR_REQUESTS);
1742 } else {
1743 /* Copy stream contents in HFR preview only case to create
1744 * dummy batch channel so that sensor streaming is in
1745 * HFR mode */
1746 if (!m_bIsVideo && (streamList->operation_mode ==
1747 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
1748 mDummyBatchStream = *newStream;
1749 }
1750 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1751 mChannelHandle, mCameraHandle->ops, captureResultCb,
1752 &gCamCapability[mCameraId]->padding_info,
1753 this,
1754 newStream,
1755 (cam_stream_type_t)
1756 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1757 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1758 mMetadataChannel,
1759 MAX_INFLIGHT_REQUESTS);
1760 if (channel == NULL) {
1761 ALOGE("%s: allocation of channel failed", __func__);
1762 pthread_mutex_unlock(&mMutex);
1763 return -ENOMEM;
1764 }
1765 newStream->max_buffers = channel->getNumBuffers();
1766 newStream->priv = channel;
1767 }
1768 break;
1769 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
1770 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
1771 mChannelHandle,
1772 mCameraHandle->ops, captureResultCb,
1773 &gCamCapability[mCameraId]->padding_info,
1774 this,
1775 newStream,
1776 (cam_stream_type_t)
1777 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1778 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1779 mMetadataChannel);
1780 if (channel == NULL) {
1781 ALOGE("%s: allocation of YUV channel failed", __func__);
1782 pthread_mutex_unlock(&mMutex);
1783 return -ENOMEM;
1784 }
1785 newStream->max_buffers = channel->getNumBuffers();
1786 newStream->priv = channel;
1787 break;
1788 }
1789 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1790 case HAL_PIXEL_FORMAT_RAW16:
1791 case HAL_PIXEL_FORMAT_RAW10:
1792 mRawChannel = new QCamera3RawChannel(
1793 mCameraHandle->camera_handle, mChannelHandle,
1794 mCameraHandle->ops, captureResultCb,
1795 &gCamCapability[mCameraId]->padding_info,
1796 this, newStream, CAM_QCOM_FEATURE_NONE,
1797 mMetadataChannel,
1798 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
1799 if (mRawChannel == NULL) {
1800 ALOGE("%s: allocation of raw channel failed", __func__);
1801 pthread_mutex_unlock(&mMutex);
1802 return -ENOMEM;
1803 }
1804 newStream->max_buffers = mRawChannel->getNumBuffers();
1805 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
1806 break;
1807 case HAL_PIXEL_FORMAT_BLOB:
1808 // Max live snapshot inflight buffer is 1. This is to mitigate
1809 // frame drop issues for video snapshot. The more buffers being
1810 // allocated, the more frame drops there are.
1811 mPictureChannel = new QCamera3PicChannel(
1812 mCameraHandle->camera_handle, mChannelHandle,
1813 mCameraHandle->ops, captureResultCb,
1814 &gCamCapability[mCameraId]->padding_info, this, newStream,
1815 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1816 m_bIs4KVideo, isZsl, mMetadataChannel,
1817 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
1818 if (mPictureChannel == NULL) {
1819 ALOGE("%s: allocation of channel failed", __func__);
1820 pthread_mutex_unlock(&mMutex);
1821 return -ENOMEM;
1822 }
1823 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
1824 newStream->max_buffers = mPictureChannel->getNumBuffers();
1825 mPictureChannel->overrideYuvSize(
1826 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
1827 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
1828 break;
1829
1830 default:
1831 ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
1832 break;
1833 }
1834 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
1835 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
1836 } else {
1837 ALOGE("%s: Error, Unknown stream type", __func__);
1838 return -EINVAL;
1839 }
1840
1841 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1842 it != mStreamInfo.end(); it++) {
1843 if ((*it)->stream == newStream) {
1844 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
1845 break;
1846 }
1847 }
1848 } else {
1849 // Channel already exists for this stream
1850 // Do nothing for now
1851 }
1852
1853 /* Do not add entries for input stream in metastream info
1854 * since there is no real stream associated with it
1855 */
1856 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
1857 mStreamConfigInfo.num_streams++;
1858 }
1859
1860 //RAW DUMP channel
1861 if (mEnableRawDump && isRawStreamRequested == false){
1862 cam_dimension_t rawDumpSize;
1863 rawDumpSize = getMaxRawSize(mCameraId);
1864 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
1865 mChannelHandle,
1866 mCameraHandle->ops,
1867 rawDumpSize,
1868 &gCamCapability[mCameraId]->padding_info,
1869 this, CAM_QCOM_FEATURE_NONE);
1870 if (!mRawDumpChannel) {
1871 ALOGE("%s: Raw Dump channel cannot be created", __func__);
1872 pthread_mutex_unlock(&mMutex);
1873 return -ENOMEM;
1874 }
1875 }
1876
1877
1878 if (mAnalysisChannel) {
1879 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1880 gCamCapability[mCameraId]->analysis_recommended_res;
1881 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1882 CAM_STREAM_TYPE_ANALYSIS;
1883 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1884 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1885 mStreamConfigInfo.num_streams++;
1886 }
1887
1888 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
1889 mSupportChannel = new QCamera3SupportChannel(
1890 mCameraHandle->camera_handle,
1891 mChannelHandle,
1892 mCameraHandle->ops,
1893 &gCamCapability[mCameraId]->padding_info,
1894 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1895 CAM_STREAM_TYPE_CALLBACK,
1896 &QCamera3SupportChannel::kDim,
1897 CAM_FORMAT_YUV_420_NV21,
1898 this);
1899 if (!mSupportChannel) {
1900 ALOGE("%s: dummy channel cannot be created", __func__);
1901 pthread_mutex_unlock(&mMutex);
1902 return -ENOMEM;
1903 }
1904 }
1905
1906 if (mSupportChannel) {
1907 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1908 QCamera3SupportChannel::kDim;
1909 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1910 CAM_STREAM_TYPE_CALLBACK;
1911 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1912 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1913 mStreamConfigInfo.num_streams++;
1914 }
1915
1916 if (mRawDumpChannel) {
1917 cam_dimension_t rawSize;
1918 rawSize = getMaxRawSize(mCameraId);
1919 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1920 rawSize;
1921 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1922 CAM_STREAM_TYPE_RAW;
1923 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1924 CAM_QCOM_FEATURE_NONE;
1925 mStreamConfigInfo.num_streams++;
1926 }
1927 /* In HFR mode, if video stream is not added, create a dummy channel so that
1928 * ISP can create a batch mode even for preview only case. This channel is
1929 * never 'start'ed (no stream-on), it is only 'initialized' */
1930 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1931 !m_bIsVideo) {
1932 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1933 mChannelHandle,
1934 mCameraHandle->ops, captureResultCb,
1935 &gCamCapability[mCameraId]->padding_info,
1936 this,
1937 &mDummyBatchStream,
1938 CAM_STREAM_TYPE_VIDEO,
1939 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1940 mMetadataChannel);
1941 if (NULL == mDummyBatchChannel) {
1942 ALOGE("%s: creation of mDummyBatchChannel failed."
1943 "Preview will use non-hfr sensor mode ", __func__);
1944 }
1945 }
1946 if (mDummyBatchChannel) {
1947 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1948 mDummyBatchStream.width;
1949 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1950 mDummyBatchStream.height;
1951 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1952 CAM_STREAM_TYPE_VIDEO;
1953 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1954 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1955 mStreamConfigInfo.num_streams++;
1956 }
1957
1958 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
1959 mStreamConfigInfo.buffer_info.max_buffers =
1960 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
1961
1962     /* Initialize mPendingRequestInfo and mPendingBuffersMap */
1963 for (pendingRequestIterator i = mPendingRequestsList.begin();
1964 i != mPendingRequestsList.end();) {
1965 i = erasePendingRequest(i);
1966 }
1967 mPendingFrameDropList.clear();
1968 // Initialize/Reset the pending buffers list
1969 mPendingBuffersMap.num_buffers = 0;
1970 mPendingBuffersMap.mPendingBufferList.clear();
1971 mPendingReprocessResultList.clear();
1972
1973 mFirstRequest = true;
1974 mCurJpegMeta.clear();
1975 //Get min frame duration for this streams configuration
1976 deriveMinFrameDuration();
1977
1978 /* Turn on video hint only if video stream is configured */
1979
1980 pthread_mutex_unlock(&mMutex);
1981
1982 return rc;
1983 }
1984
1985 /*===========================================================================
1986 * FUNCTION : validateCaptureRequest
1987 *
1988 * DESCRIPTION: validate a capture request from camera service
1989 *
1990 * PARAMETERS :
1991 * @request : request from framework to process
1992 *
1993 * RETURN :
1994 *
1995 *==========================================================================*/
validateCaptureRequest(camera3_capture_request_t * request)1996 int QCamera3HardwareInterface::validateCaptureRequest(
1997 camera3_capture_request_t *request)
1998 {
1999 ssize_t idx = 0;
2000 const camera3_stream_buffer_t *b;
2001 CameraMetadata meta;
2002
2003 /* Sanity check the request */
2004 if (request == NULL) {
2005 ALOGE("%s: NULL capture request", __func__);
2006 return BAD_VALUE;
2007 }
2008
2009 if (request->settings == NULL && mFirstRequest) {
2010 /*settings cannot be null for the first request*/
2011 return BAD_VALUE;
2012 }
2013
2014 uint32_t frameNumber = request->frame_number;
2015 if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2016 ALOGE("%s: Request %d: No output buffers provided!",
2017 __FUNCTION__, frameNumber);
2018 return BAD_VALUE;
2019 }
2020 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2021 ALOGE("%s: Number of buffers %d equals or is greater than maximum number of streams!",
2022 __func__, request->num_output_buffers, MAX_NUM_STREAMS);
2023 return BAD_VALUE;
2024 }
2025 if (request->input_buffer != NULL) {
2026 b = request->input_buffer;
2027 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2028 ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
2029 __func__, frameNumber, (long)idx);
2030 return BAD_VALUE;
2031 }
2032 if (b->release_fence != -1) {
2033 ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
2034 __func__, frameNumber, (long)idx);
2035 return BAD_VALUE;
2036 }
2037 if (b->buffer == NULL) {
2038 ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
2039 __func__, frameNumber, (long)idx);
2040 return BAD_VALUE;
2041 }
2042 }
2043
2044 // Validate all buffers
2045 b = request->output_buffers;
2046 do {
2047 QCamera3ProcessingChannel *channel =
2048 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2049 if (channel == NULL) {
2050 ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
2051 __func__, frameNumber, (long)idx);
2052 return BAD_VALUE;
2053 }
2054 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2055 ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
2056 __func__, frameNumber, (long)idx);
2057 return BAD_VALUE;
2058 }
2059 if (b->release_fence != -1) {
2060 ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
2061 __func__, frameNumber, (long)idx);
2062 return BAD_VALUE;
2063 }
2064 if (b->buffer == NULL) {
2065 ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
2066 __func__, frameNumber, (long)idx);
2067 return BAD_VALUE;
2068 }
2069 if (*(b->buffer) == NULL) {
2070 ALOGE("%s: Request %d: Buffer %ld: NULL private handle!",
2071 __func__, frameNumber, (long)idx);
2072 return BAD_VALUE;
2073 }
2074 idx++;
2075 b = request->output_buffers + idx;
2076 } while (idx < (ssize_t)request->num_output_buffers);
2077
2078 return NO_ERROR;
2079 }
2080
2081 /*===========================================================================
2082 * FUNCTION : deriveMinFrameDuration
2083 *
2084  * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2085 * on currently configured streams.
2086 *
2087 * PARAMETERS : NONE
2088 *
2089 * RETURN : NONE
2090 *
2091 *==========================================================================*/
deriveMinFrameDuration()2092 void QCamera3HardwareInterface::deriveMinFrameDuration()
2093 {
2094 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2095
2096 maxJpegDim = 0;
2097 maxProcessedDim = 0;
2098 maxRawDim = 0;
2099
2100 // Figure out maximum jpeg, processed, and raw dimensions
2101 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2102 it != mStreamInfo.end(); it++) {
2103
2104 // Input stream doesn't have valid stream_type
2105 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2106 continue;
2107
2108 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2109 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2110 if (dimension > maxJpegDim)
2111 maxJpegDim = dimension;
2112 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2113 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2114 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2115 if (dimension > maxRawDim)
2116 maxRawDim = dimension;
2117 } else {
2118 if (dimension > maxProcessedDim)
2119 maxProcessedDim = dimension;
2120 }
2121 }
2122
2123 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2124 MAX_SIZES_CNT);
2125
2126 //Assume all jpeg dimensions are in processed dimensions.
2127 if (maxJpegDim > maxProcessedDim)
2128 maxProcessedDim = maxJpegDim;
2129 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2130 if (maxProcessedDim > maxRawDim) {
2131 maxRawDim = INT32_MAX;
2132
2133 for (size_t i = 0; i < count; i++) {
2134 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2135 gCamCapability[mCameraId]->raw_dim[i].height;
2136 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2137 maxRawDim = dimension;
2138 }
2139 }
2140
2141 //Find minimum durations for processed, jpeg, and raw
2142 for (size_t i = 0; i < count; i++) {
2143 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2144 gCamCapability[mCameraId]->raw_dim[i].height) {
2145 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2146 break;
2147 }
2148 }
2149 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2150 for (size_t i = 0; i < count; i++) {
2151 if (maxProcessedDim ==
2152 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2153 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2154 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2155 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2156 break;
2157 }
2158 }
2159 }
2160
2161 /*===========================================================================
2162 * FUNCTION : getMinFrameDuration
2163 *
2164  * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
2165  * and current request configuration.
2166  *
2167  * PARAMETERS : @request: request sent by the frameworks
2168  *
2169  * RETURN : min frame duration for a particular request
2170 *
2171 *==========================================================================*/
getMinFrameDuration(const camera3_capture_request_t * request)2172 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2173 {
2174 bool hasJpegStream = false;
2175 bool hasRawStream = false;
2176 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2177 const camera3_stream_t *stream = request->output_buffers[i].stream;
2178 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2179 hasJpegStream = true;
2180 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2181 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2182 stream->format == HAL_PIXEL_FORMAT_RAW16)
2183 hasRawStream = true;
2184 }
2185
2186 if (!hasJpegStream)
2187 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2188 else
2189 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2190 }
2191
2192 /*===========================================================================
2193 * FUNCTION : handlePendingReprocResults
2194 *
2195 * DESCRIPTION: check and notify on any pending reprocess results
2196 *
2197 * PARAMETERS :
2198 * @frame_number : Pending request frame number
2199 *
2200 * RETURN : int32_t type of status
2201 * NO_ERROR -- success
2202 * none-zero failure code
2203 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a reprocess result that was saved earlier under this frame
    // number and could not be delivered at the time it arrived.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notify message that was held back with the result.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            CDBG("%s: Delayed reprocess notify %d", __func__,
                    frame_number);

            // Find the matching pending request so its input buffer and
            // settings can be attached to the capture result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    CDBG("%s: Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!", __func__,
                            k->frame_number);

                    // Build the result from the saved output buffer (j) and
                    // the pending request bookkeeping (k).
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers = &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    // PARTIAL_RESULT_COUNT — presumably marks this as the
                    // final partial result for the request; confirm against
                    // the partial-result handling elsewhere in this file.
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // Request fully answered; remove it from the pending list.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Safe erase: j is not dereferenced after this point, and we
            // break out of the loop immediately.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2242
2243 /*===========================================================================
2244 * FUNCTION : handleBatchMetadata
2245 *
2246 * DESCRIPTION: Handles metadata buffer callback in batch mode
2247 *
2248 * PARAMETERS : @metadata_buf: metadata buffer
2249 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2250 * the meta buf in this method
2251 *
2252 * RETURN :
2253 *
2254 *==========================================================================*/
handleBatchMetadata(mm_camera_super_buf_t * metadata_buf,bool free_and_bufdone_meta_buf)2255 void QCamera3HardwareInterface::handleBatchMetadata(
2256 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2257 {
2258 ATRACE_CALL();
2259
2260 if (NULL == metadata_buf) {
2261 ALOGE("%s: metadata_buf is NULL", __func__);
2262 return;
2263 }
2264 /* In batch mode, the metdata will contain the frame number and timestamp of
2265 * the last frame in the batch. Eg: a batch containing buffers from request
2266 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
2267 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
2268 * multiple process_capture_results */
2269 metadata_buffer_t *metadata =
2270 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2271 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2272 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
2273 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
2274 uint32_t frame_number = 0, urgent_frame_number = 0;
2275 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2276 bool invalid_metadata = false;
2277 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2278 size_t loopCount = 1;
2279
2280 int32_t *p_frame_number_valid =
2281 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2282 uint32_t *p_frame_number =
2283 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2284 int64_t *p_capture_time =
2285 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2286 int32_t *p_urgent_frame_number_valid =
2287 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2288 uint32_t *p_urgent_frame_number =
2289 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2290
2291 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
2292 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
2293 (NULL == p_urgent_frame_number)) {
2294 ALOGE("%s: Invalid metadata", __func__);
2295 invalid_metadata = true;
2296 } else {
2297 frame_number_valid = *p_frame_number_valid;
2298 last_frame_number = *p_frame_number;
2299 last_frame_capture_time = *p_capture_time;
2300 urgent_frame_number_valid = *p_urgent_frame_number_valid;
2301 last_urgent_frame_number = *p_urgent_frame_number;
2302 }
2303
2304 /* In batchmode, when no video buffers are requested, set_parms are sent
2305 * for every capture_request. The difference between consecutive urgent
2306 * frame numbers and frame numbers should be used to interpolate the
2307 * corresponding frame numbers and time stamps */
2308 pthread_mutex_lock(&mMutex);
2309 if (urgent_frame_number_valid) {
2310 first_urgent_frame_number =
2311 mPendingBatchMap.valueFor(last_urgent_frame_number);
2312 urgentFrameNumDiff = last_urgent_frame_number + 1 -
2313 first_urgent_frame_number;
2314
2315 CDBG_HIGH("%s: urgent_frm: valid: %d frm_num: %d - %d",
2316 __func__, urgent_frame_number_valid,
2317 first_urgent_frame_number, last_urgent_frame_number);
2318 }
2319
2320 if (frame_number_valid) {
2321 first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
2322 frameNumDiff = last_frame_number + 1 -
2323 first_frame_number;
2324 mPendingBatchMap.removeItem(last_frame_number);
2325
2326 CDBG_HIGH("%s: frm: valid: %d frm_num: %d - %d",
2327 __func__, frame_number_valid,
2328 first_frame_number, last_frame_number);
2329
2330 }
2331 pthread_mutex_unlock(&mMutex);
2332
2333 if (urgent_frame_number_valid || frame_number_valid) {
2334 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
2335 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
2336 ALOGE("%s: urgentFrameNumDiff: %d urgentFrameNum: %d",
2337 __func__, urgentFrameNumDiff, last_urgent_frame_number);
2338 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
2339 ALOGE("%s: frameNumDiff: %d frameNum: %d",
2340 __func__, frameNumDiff, last_frame_number);
2341 }
2342
2343 for (size_t i = 0; i < loopCount; i++) {
2344 /* handleMetadataWithLock is called even for invalid_metadata for
2345 * pipeline depth calculation */
2346 if (!invalid_metadata) {
2347 /* Infer frame number. Batch metadata contains frame number of the
2348 * last frame */
2349 if (urgent_frame_number_valid) {
2350 if (i < urgentFrameNumDiff) {
2351 urgent_frame_number =
2352 first_urgent_frame_number + i;
2353 CDBG("%s: inferred urgent frame_number: %d",
2354 __func__, urgent_frame_number);
2355 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2356 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
2357 } else {
2358 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
2359 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2360 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
2361 }
2362 }
2363
2364 /* Infer frame number. Batch metadata contains frame number of the
2365 * last frame */
2366 if (frame_number_valid) {
2367 if (i < frameNumDiff) {
2368 frame_number = first_frame_number + i;
2369 CDBG("%s: inferred frame_number: %d", __func__, frame_number);
2370 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2371 CAM_INTF_META_FRAME_NUMBER, frame_number);
2372 } else {
2373 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
2374 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2375 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
2376 }
2377 }
2378
2379 if (last_frame_capture_time) {
2380 //Infer timestamp
2381 first_frame_capture_time = last_frame_capture_time -
2382 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
2383 capture_time =
2384 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
2385 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2386 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
2387 CDBG_HIGH("%s: batch capture_time: %lld, capture_time: %lld",
2388 __func__, last_frame_capture_time, capture_time);
2389 }
2390 }
2391 pthread_mutex_lock(&mMutex);
2392 handleMetadataWithLock(metadata_buf,
2393 false /* free_and_bufdone_meta_buf */,
2394 (i == 0) /* first metadata in the batch metadata */);
2395 pthread_mutex_unlock(&mMutex);
2396 }
2397
2398 done_batch_metadata:
2399 /* BufDone metadata buffer */
2400 if (free_and_bufdone_meta_buf) {
2401 mMetadataChannel->bufDone(metadata_buf);
2402 free(metadata_buf);
2403 }
2404 }
2405
2406 /*===========================================================================
2407 * FUNCTION : handleMetadataWithLock
2408 *
2409 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2410 *
2411 * PARAMETERS : @metadata_buf: metadata buffer
2412 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2413 * the meta buf in this method
2414 * @firstMetadataInBatch: Boolean to indicate whether this is the
2415 * first metadata in a batch. Valid only for batch mode
2416 *
2417 * RETURN :
2418 *
2419 *==========================================================================*/
handleMetadataWithLock(mm_camera_super_buf_t * metadata_buf,bool free_and_bufdone_meta_buf,bool firstMetadataInBatch)2420 void QCamera3HardwareInterface::handleMetadataWithLock(
2421 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
2422 bool firstMetadataInBatch)
2423 {
2424 ATRACE_CALL();
2425
2426 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2427 int32_t frame_number_valid, urgent_frame_number_valid;
2428 uint32_t frame_number, urgent_frame_number;
2429 int64_t capture_time;
2430 bool unfinished_raw_request = false;
2431
2432 int32_t *p_frame_number_valid =
2433 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2434 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2435 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2436 int32_t *p_urgent_frame_number_valid =
2437 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2438 uint32_t *p_urgent_frame_number =
2439 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2440 IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2441 metadata) {
2442 CDBG("%s: Dropped frame info for frame_number_valid %d, frame_number %d",
2443 __func__, *p_frame_number_valid, *p_frame_number);
2444 }
2445
2446 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2447 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2448 ALOGE("%s: Invalid metadata", __func__);
2449 if (free_and_bufdone_meta_buf) {
2450 mMetadataChannel->bufDone(metadata_buf);
2451 free(metadata_buf);
2452 }
2453 goto done_metadata;
2454 } else {
2455 frame_number_valid = *p_frame_number_valid;
2456 frame_number = *p_frame_number;
2457 capture_time = *p_capture_time;
2458 urgent_frame_number_valid = *p_urgent_frame_number_valid;
2459 urgent_frame_number = *p_urgent_frame_number;
2460 }
2461 //Partial result on process_capture_result for timestamp
2462 if (urgent_frame_number_valid) {
2463 CDBG("%s: valid urgent frame_number = %u, capture_time = %lld",
2464 __func__, urgent_frame_number, capture_time);
2465
2466         //Received an urgent Frame Number, handle it
2467 //using partial results
2468 for (pendingRequestIterator i =
2469 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
2470 CDBG("%s: Iterator Frame = %d urgent frame = %d",
2471 __func__, i->frame_number, urgent_frame_number);
2472
2473 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
2474 (i->partial_result_cnt == 0)) {
2475 ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
2476 __func__, i->frame_number);
2477 }
2478
2479 if (i->frame_number == urgent_frame_number &&
2480 i->bUrgentReceived == 0) {
2481
2482 camera3_capture_result_t result;
2483 memset(&result, 0, sizeof(camera3_capture_result_t));
2484
2485 i->partial_result_cnt++;
2486 i->bUrgentReceived = 1;
2487 // Extract 3A metadata
2488 result.result =
2489 translateCbUrgentMetadataToResultMetadata(metadata);
2490 // Populate metadata result
2491 result.frame_number = urgent_frame_number;
2492 result.num_output_buffers = 0;
2493 result.output_buffers = NULL;
2494 result.partial_result = i->partial_result_cnt;
2495
2496 mCallbackOps->process_capture_result(mCallbackOps, &result);
2497 CDBG("%s: urgent frame_number = %u, capture_time = %lld",
2498 __func__, result.frame_number, capture_time);
2499 free_camera_metadata((camera_metadata_t *)result.result);
2500 break;
2501 }
2502 }
2503 }
2504
2505 if (!frame_number_valid) {
2506 CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
2507 if (free_and_bufdone_meta_buf) {
2508 mMetadataChannel->bufDone(metadata_buf);
2509 free(metadata_buf);
2510 }
2511 goto done_metadata;
2512 }
2513 CDBG_HIGH("%s: valid frame_number = %u, capture_time = %lld", __func__,
2514 frame_number, capture_time);
2515
2516 for (pendingRequestIterator i = mPendingRequestsList.begin();
2517 i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
2518 // Flush out all entries with less or equal frame numbers.
2519
2520 camera3_capture_result_t result;
2521 memset(&result, 0, sizeof(camera3_capture_result_t));
2522
2523 CDBG("%s: frame_number in the list is %u", __func__, i->frame_number);
2524
2525 // Check whether any stream buffer corresponding to this is dropped or not
2526 // If dropped, then send the ERROR_BUFFER for the corresponding stream
2527 // The API does not expect a blob buffer to be dropped
2528 if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
2529 /* Clear notify_msg structure */
2530 camera3_notify_msg_t notify_msg;
2531 memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t));
2532 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2533 j != i->buffers.end(); j++) {
2534 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
2535 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2536 for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
2537 if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
2538 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
2539 ALOGW("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
2540 __func__, i->frame_number, streamID, j->stream->format);
2541 notify_msg.type = CAMERA3_MSG_ERROR;
2542 notify_msg.message.error.frame_number = i->frame_number;
2543 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
2544 notify_msg.message.error.error_stream = j->stream;
2545 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
2546 ALOGW("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
2547 __func__, i->frame_number, streamID, j->stream->format);
2548 PendingFrameDropInfo PendingFrameDrop;
2549 PendingFrameDrop.frame_number=i->frame_number;
2550 PendingFrameDrop.stream_ID = streamID;
2551 // Add the Frame drop info to mPendingFrameDropList
2552 mPendingFrameDropList.push_back(PendingFrameDrop);
2553 }
2554 }
2555 }
2556 }
2557
2558 // Send empty metadata with already filled buffers for dropped metadata
2559 // and send valid metadata with already filled buffers for current metadata
2560 /* we could hit this case when we either
2561 * 1. have a pending reprocess request or
2562 * 2. miss a metadata buffer callback */
2563 if (i->frame_number < frame_number) {
2564 if (i->input_buffer) {
2565 /* this will be handled in handleInputBufferWithLock */
2566 i++;
2567 continue;
2568 } else if (i->need_dynamic_blklvl) {
2569 unfinished_raw_request = true;
2570 // i->partial_result_cnt--;
2571 CDBG("%s, frame number:%d, partial_result:%d, unfinished raw request..",
2572 __func__, i->frame_number, i->partial_result_cnt);
2573 i++;
2574 continue;
2575 } else if (i->pending_extra_result) {
2576 CDBG("%s, frame_number:%d, partial_result:%d, need_dynamic_blklvl:%d",
2577 __func__, i->frame_number, i->partial_result_cnt,
2578 i->need_dynamic_blklvl);
2579 // i->partial_result_cnt--;
2580 i++;
2581 continue;
2582 } else {
2583 ALOGE("%s: Fatal: Missing metadata buffer for frame number %d", __func__, i->frame_number);
2584 if (free_and_bufdone_meta_buf) {
2585 mMetadataChannel->bufDone(metadata_buf);
2586 free(metadata_buf);
2587 }
2588 camera3_notify_msg_t notify_msg;
2589 memset(¬ify_msg, 0, sizeof(notify_msg));
2590 notify_msg.type = CAMERA3_MSG_ERROR;
2591 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
2592 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
2593 goto done_metadata;
2594 }
2595 } else {
2596 i->partial_result_cnt++;
2597 CDBG("%s, frame_number:%d, need_dynamic_blklvl:%d, partial cnt:%d\n",
2598 __func__, i->frame_number, i->need_dynamic_blklvl,
2599 i->partial_result_cnt);
2600 if (!i->need_dynamic_blklvl) {
2601 CDBG("%s, meta for request without raw, frame number: %d\n",
2602 __func__, i->frame_number);
2603 if (!unfinished_raw_request) {
2604 i->partial_result_cnt++;
2605 CDBG("%s, no raw request pending, send the final (cnt:%d) partial result",
2606 __func__, i->partial_result_cnt);
2607 }
2608 }
2609
2610 result.partial_result = i->partial_result_cnt;
2611
2612 /* Clear notify_msg structure */
2613 camera3_notify_msg_t notify_msg;
2614 memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t));
2615
2616 // Send shutter notify to frameworks
2617 notify_msg.type = CAMERA3_MSG_SHUTTER;
2618 notify_msg.message.shutter.frame_number = i->frame_number;
2619 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
2620 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
2621
2622 i->timestamp = capture_time;
2623
2624 // Find channel requiring metadata, meaning internal offline postprocess
2625 // is needed.
2626 //TODO: for now, we don't support two streams requiring metadata at the same time.
2627 // (because we are not making copies, and metadata buffer is not reference counted.
2628 bool internalPproc = false;
2629 for (pendingBufferIterator iter = i->buffers.begin();
2630 iter != i->buffers.end(); iter++) {
2631 if (iter->need_metadata) {
2632 internalPproc = true;
2633 QCamera3ProcessingChannel *channel =
2634 (QCamera3ProcessingChannel *)iter->stream->priv;
2635 channel->queueReprocMetadata(metadata_buf);
2636 break;
2637 }
2638 }
2639
2640 result.result = translateFromHalMetadata(metadata,
2641 i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
2642 i->capture_intent, i->hybrid_ae_enable, internalPproc, i->need_dynamic_blklvl,
2643 firstMetadataInBatch);
2644
2645 saveExifParams(metadata);
2646
2647 if (i->blob_request) {
2648 {
2649 //Dump tuning metadata if enabled and available
2650 char prop[PROPERTY_VALUE_MAX];
2651 memset(prop, 0, sizeof(prop));
2652 property_get("persist.camera.dumpmetadata", prop, "0");
2653 int32_t enabled = atoi(prop);
2654 if (enabled && metadata->is_tuning_params_valid) {
2655 dumpMetadataToFile(metadata->tuning_params,
2656 mMetaFrameCount,
2657 enabled,
2658 "Snapshot",
2659 frame_number);
2660 }
2661 }
2662 }
2663
2664 if (!internalPproc) {
2665 CDBG("%s: couldn't find need_metadata for this metadata", __func__);
2666 // Return metadata buffer
2667 if (free_and_bufdone_meta_buf) {
2668 mMetadataChannel->bufDone(metadata_buf);
2669 free(metadata_buf);
2670 }
2671 }
2672 }
2673 if (!result.result) {
2674 ALOGE("%s: metadata is NULL", __func__);
2675 }
2676 result.frame_number = i->frame_number;
2677 result.input_buffer = i->input_buffer;
2678 result.num_output_buffers = 0;
2679 result.output_buffers = NULL;
2680 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2681 j != i->buffers.end(); j++) {
2682 if (j->buffer) {
2683 result.num_output_buffers++;
2684 }
2685 }
2686
2687 if (result.num_output_buffers > 0) {
2688 camera3_stream_buffer_t *result_buffers =
2689 new camera3_stream_buffer_t[result.num_output_buffers];
2690 if (!result_buffers) {
2691 ALOGE("%s: Fatal error: out of memory", __func__);
2692 }
2693 size_t result_buffers_idx = 0;
2694 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2695 j != i->buffers.end(); j++) {
2696 if (j->buffer) {
2697 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
2698 m != mPendingFrameDropList.end(); m++) {
2699 QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
2700 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2701 if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
2702 j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
2703 ALOGW("%s: Stream STATUS_ERROR frame_number=%u, streamID=%u",
2704 __func__, frame_number, streamID);
2705 m = mPendingFrameDropList.erase(m);
2706 break;
2707 }
2708 }
2709
2710 for (List<PendingBufferInfo>::iterator k =
2711 mPendingBuffersMap.mPendingBufferList.begin();
2712 k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
2713 if (k->buffer == j->buffer->buffer) {
2714 CDBG("%s: Found buffer %p in pending buffer List "
2715 "for frame %u, Take it out!!", __func__,
2716 k->buffer, k->frame_number);
2717 mPendingBuffersMap.num_buffers--;
2718 k = mPendingBuffersMap.mPendingBufferList.erase(k);
2719 break;
2720 }
2721 }
2722
2723 result_buffers[result_buffers_idx++] = *(j->buffer);
2724 free(j->buffer);
2725 j->buffer = NULL;
2726 }
2727 }
2728 result.output_buffers = result_buffers;
2729 mCallbackOps->process_capture_result(mCallbackOps, &result);
2730 CDBG("%s %d: meta frame_number = %u, capture_time = %lld, partial:%d",
2731 __func__, __LINE__, result.frame_number, i->timestamp, result.partial_result);
2732 free_camera_metadata((camera_metadata_t *)result.result);
2733 delete[] result_buffers;
2734 } else {
2735 mCallbackOps->process_capture_result(mCallbackOps, &result);
2736 CDBG("%s %d: meta frame_number = %u, capture_time = %lld, partial:%d",
2737 __func__, __LINE__, result.frame_number, i->timestamp, result.partial_result);
2738 free_camera_metadata((camera_metadata_t *)result.result);
2739 }
2740
2741 if (i->partial_result_cnt == PARTIAL_RESULT_COUNT) {
2742 mPendingLiveRequest--;
2743 i = erasePendingRequest(i);
2744 } else {
2745 CDBG("%s, keep in list, frame number:%d, partial result:%d",
2746 __func__, i->frame_number, i->partial_result_cnt);
2747 i->pending_extra_result = true;
2748 i++;
2749 }
2750
2751 if (!mPendingReprocessResultList.empty()) {
2752 handlePendingReprocResults(frame_number + 1);
2753 }
2754
2755 }
2756
2757 done_metadata:
2758 for (pendingRequestIterator i = mPendingRequestsList.begin();
2759 i != mPendingRequestsList.end() ;i++) {
2760 i->pipeline_depth++;
2761 }
2762 CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
2763 unblockRequestIfNecessary();
2764
2765 }
2766
2767 /*===========================================================================
2768 * FUNCTION : hdrPlusPerfLock
2769 *
2770 * DESCRIPTION: perf lock for HDR+ using custom intent
2771 *
2772 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
2773 *
2774 * RETURN : None
2775 *
2776 *==========================================================================*/
hdrPlusPerfLock(mm_camera_super_buf_t * metadata_buf)2777 void QCamera3HardwareInterface::hdrPlusPerfLock(
2778 mm_camera_super_buf_t *metadata_buf)
2779 {
2780 if (NULL == metadata_buf) {
2781 ALOGE("%s: metadata_buf is NULL", __func__);
2782 return;
2783 }
2784 metadata_buffer_t *metadata =
2785 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2786 int32_t *p_frame_number_valid =
2787 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2788 uint32_t *p_frame_number =
2789 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2790
2791 //acquire perf lock for 5 sec after the last HDR frame is captured
2792 if (*p_frame_number_valid) {
2793 if (mLastCustIntentFrmNum == (int32_t)*p_frame_number) {
2794 m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
2795 }
2796 }
2797
2798 //release lock after perf lock timer is expired. If lock is already released,
2799 //isTimerReset returns false
2800 if (m_perfLock.isTimerReset()) {
2801 mLastCustIntentFrmNum = -1;
2802 m_perfLock.lock_rel_timed();
2803 }
2804 }
2805
2806 /*===========================================================================
2807 * FUNCTION : handleInputBufferWithLock
2808 *
2809 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
2810 *
2811 * PARAMETERS : @frame_number: frame number of the input buffer
2812 *
2813 * RETURN :
2814 *
2815 *==========================================================================*/
handleInputBufferWithLock(uint32_t frame_number)2816 void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
2817 {
2818 ATRACE_CALL();
2819 pendingRequestIterator i = mPendingRequestsList.begin();
2820 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
2821 i++;
2822 }
2823 if (i != mPendingRequestsList.end() && i->input_buffer) {
2824 //found the right request
2825 if (!i->shutter_notified) {
2826 CameraMetadata settings;
2827 camera3_notify_msg_t notify_msg;
2828 memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t));
2829 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
2830 if(i->settings) {
2831 settings = i->settings;
2832 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
2833 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
2834 } else {
2835 ALOGE("%s: No timestamp in input settings! Using current one.",
2836 __func__);
2837 }
2838 } else {
2839 ALOGE("%s: Input settings missing!", __func__);
2840 }
2841
2842 notify_msg.type = CAMERA3_MSG_SHUTTER;
2843 notify_msg.message.shutter.frame_number = frame_number;
2844 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
2845 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
2846 i->shutter_notified = true;
2847 CDBG("%s: Input request metadata notify frame_number = %u, capture_time = %llu",
2848 __func__, i->frame_number, notify_msg.message.shutter.timestamp);
2849 }
2850
2851 if (i->input_buffer->release_fence != -1) {
2852 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
2853 close(i->input_buffer->release_fence);
2854 if (rc != OK) {
2855 ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
2856 }
2857 }
2858
2859 camera3_capture_result result;
2860 memset(&result, 0, sizeof(camera3_capture_result));
2861 result.frame_number = frame_number;
2862 result.result = i->settings;
2863 result.input_buffer = i->input_buffer;
2864 result.partial_result = PARTIAL_RESULT_COUNT;
2865
2866 mCallbackOps->process_capture_result(mCallbackOps, &result);
2867 CDBG("%s: Input request metadata and input buffer frame_number = %u",
2868 __func__, i->frame_number);
2869 i = erasePendingRequest(i);
2870 } else {
2871 ALOGE("%s: Could not find input request for frame number %d", __func__, frame_number);
2872 }
2873 }
2874
getBlackLevelRegion(int (& opticalBlackRegions)[4])2875 bool QCamera3HardwareInterface::getBlackLevelRegion(int (&opticalBlackRegions)[4])
2876 {
2877 if (gCamCapability[mCameraId]->optical_black_region_count > 0) {
2878 /*just calculate one region black level and send to fwk*/
2879 for (size_t i = 0; i < 4; i++) {
2880 opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
2881 }
2882 return TRUE;
2883 }
2884
2885 return FALSE;
2886 }
2887
/*===========================================================================
 * FUNCTION   : sendDynamicBlackLevel
 *
 * DESCRIPTION: Thread-safe wrapper around sendDynamicBlackLevelWithLock;
 *              acquires mMutex for the duration of the call.
 *
 * PARAMETERS : @blacklevel  : per-channel dynamic black level values
 *              @frame_number: frame number the black level belongs to
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::sendDynamicBlackLevel(float blacklevel[4], uint32_t frame_number)
{
    CDBG("%s, E.\n", __func__);
    pthread_mutex_lock(&mMutex);
    sendDynamicBlackLevelWithLock(blacklevel, frame_number);
    pthread_mutex_unlock(&mMutex);
    CDBG("%s, X.\n", __func__);
}
2896
/*===========================================================================
 * FUNCTION   : sendDynamicBlackLevelWithLock
 *
 * DESCRIPTION: Sends the dynamic black level for @frame_number to the
 *              framework as an extra partial result, then flushes any
 *              later requests that were only waiting on this raw result.
 *              Caller must hold mMutex.
 *
 * PARAMETERS : @blacklevel  : per-channel dynamic black level values
 *              @frame_number: frame number the black level belongs to
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::sendDynamicBlackLevelWithLock(float blacklevel[4], uint32_t frame_number)
{
    CDBG("%s, E. frame_number:%d\n", __func__, frame_number);

    // Find the pending request for this frame number; it must have been
    // flagged as needing a dynamic black level result.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if ((i == mPendingRequestsList.end()) || !i->need_dynamic_blklvl) {
        ALOGE("%s, error: invalid frame number.", __func__);
        return;
    }

    // This delivery counts as one more partial result for the request.
    i->partial_result_cnt++;

    CameraMetadata camMetadata;
    int64_t fwk_frame_number = (int64_t)frame_number;
    camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);

    // update dynamic black level here
    camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, blacklevel, 4);

    // Metadata-only partial result: no output buffers attached.
    camera3_capture_result_t result;
    memset(&result, 0, sizeof(camera3_capture_result_t));
    result.frame_number = frame_number;
    result.num_output_buffers = 0;
    result.result = camMetadata.release();
    result.partial_result = i->partial_result_cnt;

    CDBG("%s, partial result:%d, frame_number:%d, pending extra result:%d\n",
            __func__, result.partial_result, frame_number, i->pending_extra_result);
    mCallbackOps->process_capture_result(mCallbackOps, &result);
    // result.result was released from camMetadata above, so free it here.
    free_camera_metadata((camera_metadata_t *)result.result);

    if (i->partial_result_cnt == PARTIAL_RESULT_COUNT) {
        // Request fully reported: drop it from the pending list.
        CDBG("%s, remove cur request from pending list.", __func__);
        mPendingLiveRequest--;
        i = erasePendingRequest(i);

        // traverse the remaining pending list to see whether need to send cached ones..
        while (i != mPendingRequestsList.end()) {
            CDBG("%s, frame number:%d, partial_result:%d, pending extra result:%d",
                    __func__, i->frame_number, i->partial_result_cnt,
                    i->pending_extra_result);

            // A follower is flushable when it only lacks its final partial
            // result and does not itself need a dynamic black level.
            if ((i->partial_result_cnt == PARTIAL_RESULT_COUNT - 1)
                    && (i->need_dynamic_blklvl == false) /* in case two consecutive raw requests */) {
                // send out final result, and remove it from pending list.
                CameraMetadata camMetadata;
                int64_t fwk_frame_number = (int64_t)i->frame_number;
                camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);

                memset(&result, 0, sizeof(camera3_capture_result_t));
                result.frame_number = i->frame_number;
                result.num_output_buffers = 0;
                result.result = camMetadata.release();
                result.partial_result = i->partial_result_cnt + 1;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                free_camera_metadata((camera_metadata_t *)result.result);

                mPendingLiveRequest--;
                i = erasePendingRequest(i);
                CDBG("%s, mPendingLiveRequest:%d, pending list size:%d",
                        __func__, mPendingLiveRequest, mPendingRequestsList.size());
            } else {
                // Stop at the first request that still has work outstanding.
                break;
            }
        }
    }

    // A slot may have opened up for a blocked process_capture_request.
    unblockRequestIfNecessary();
    CDBG("%s, X.mPendingLiveRequest = %d\n", __func__, mPendingLiveRequest);
}
2971
2972
2973 /*===========================================================================
2974 * FUNCTION : handleBufferWithLock
2975 *
2976 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
2977 *
2978 * PARAMETERS : @buffer: image buffer for the callback
2979 * @frame_number: frame number of the image buffer
2980 *
2981 * RETURN :
2982 *
2983 *==========================================================================*/
handleBufferWithLock(camera3_stream_buffer_t * buffer,uint32_t frame_number)2984 void QCamera3HardwareInterface::handleBufferWithLock(
2985 camera3_stream_buffer_t *buffer, uint32_t frame_number)
2986 {
2987 ATRACE_CALL();
2988 // If the frame number doesn't exist in the pending request list,
2989 // directly send the buffer to the frameworks, and update pending buffers map
2990 // Otherwise, book-keep the buffer.
2991 pendingRequestIterator i = mPendingRequestsList.begin();
2992 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
2993 i++;
2994 }
2995 if (i == mPendingRequestsList.end() || i->pending_extra_result == true) {
2996 if (i != mPendingRequestsList.end()) {
2997 // though the pendingRequestInfo is still in the list,
2998 // still send the buffer directly, as the pending_extra_result is true,
2999 // and we've already received meta for this frame number.
3000 CDBG("%s, send the buffer directly, frame number:%d",
3001 __func__, i->frame_number);
3002 }
3003 // Verify all pending requests frame_numbers are greater
3004 for (pendingRequestIterator j = mPendingRequestsList.begin();
3005 j != mPendingRequestsList.end(); j++) {
3006 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3007 ALOGE("%s: Error: pending live frame number %d is smaller than %d",
3008 __func__, j->frame_number, frame_number);
3009 }
3010 }
3011 camera3_capture_result_t result;
3012 memset(&result, 0, sizeof(camera3_capture_result_t));
3013 result.result = NULL;
3014 result.frame_number = frame_number;
3015 result.num_output_buffers = 1;
3016 result.partial_result = 0;
3017 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3018 m != mPendingFrameDropList.end(); m++) {
3019 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3020 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3021 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3022 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3023 CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
3024 __func__, frame_number, streamID);
3025 m = mPendingFrameDropList.erase(m);
3026 break;
3027 }
3028 }
3029 result.output_buffers = buffer;
3030 CDBG_HIGH("%s: result frame_number = %d, buffer = %p",
3031 __func__, frame_number, buffer->buffer);
3032
3033 for (List<PendingBufferInfo>::iterator k =
3034 mPendingBuffersMap.mPendingBufferList.begin();
3035 k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
3036 if (k->buffer == buffer->buffer) {
3037 CDBG("%s: Found Frame buffer, take it out from list",
3038 __func__);
3039
3040 mPendingBuffersMap.num_buffers--;
3041 k = mPendingBuffersMap.mPendingBufferList.erase(k);
3042 break;
3043 }
3044 }
3045 CDBG("%s: mPendingBuffersMap.num_buffers = %d",
3046 __func__, mPendingBuffersMap.num_buffers);
3047
3048 mCallbackOps->process_capture_result(mCallbackOps, &result);
3049 } else {
3050 if (i->input_buffer) {
3051 CameraMetadata settings;
3052 camera3_notify_msg_t notify_msg;
3053 memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t));
3054 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3055 if(i->settings) {
3056 settings = i->settings;
3057 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3058 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3059 } else {
3060 ALOGE("%s: No timestamp in input settings! Using current one.",
3061 __func__);
3062 }
3063 } else {
3064 ALOGE("%s: Input settings missing!", __func__);
3065 }
3066
3067 notify_msg.type = CAMERA3_MSG_SHUTTER;
3068 notify_msg.message.shutter.frame_number = frame_number;
3069 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3070
3071 if (i->input_buffer->release_fence != -1) {
3072 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3073 close(i->input_buffer->release_fence);
3074 if (rc != OK) {
3075 ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
3076 }
3077 }
3078
3079 for (List<PendingBufferInfo>::iterator k =
3080 mPendingBuffersMap.mPendingBufferList.begin();
3081 k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
3082 if (k->buffer == buffer->buffer) {
3083 CDBG("%s: Found Frame buffer, take it out from list",
3084 __func__);
3085
3086 mPendingBuffersMap.num_buffers--;
3087 k = mPendingBuffersMap.mPendingBufferList.erase(k);
3088 break;
3089 }
3090 }
3091 CDBG("%s: mPendingBuffersMap.num_buffers = %d",
3092 __func__, mPendingBuffersMap.num_buffers);
3093
3094 bool notifyNow = true;
3095 for (pendingRequestIterator j = mPendingRequestsList.begin();
3096 j != mPendingRequestsList.end(); j++) {
3097 if (j->frame_number < frame_number) {
3098 notifyNow = false;
3099 break;
3100 }
3101 }
3102
3103 if (notifyNow) {
3104 camera3_capture_result result;
3105 memset(&result, 0, sizeof(camera3_capture_result));
3106 result.frame_number = frame_number;
3107 result.result = i->settings;
3108 result.input_buffer = i->input_buffer;
3109 result.num_output_buffers = 1;
3110 result.output_buffers = buffer;
3111 result.partial_result = PARTIAL_RESULT_COUNT;
3112
3113 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
3114 mCallbackOps->process_capture_result(mCallbackOps, &result);
3115 CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
3116 i = erasePendingRequest(i);
3117 } else {
3118 // Cache reprocess result for later
3119 PendingReprocessResult pendingResult;
3120 memset(&pendingResult, 0, sizeof(PendingReprocessResult));
3121 pendingResult.notify_msg = notify_msg;
3122 pendingResult.buffer = *buffer;
3123 pendingResult.frame_number = frame_number;
3124 mPendingReprocessResultList.push_back(pendingResult);
3125 CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
3126 }
3127 } else {
3128 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3129 j != i->buffers.end(); j++) {
3130 if (j->stream == buffer->stream) {
3131 if (j->buffer != NULL) {
3132 ALOGE("%s: Error: buffer is already set", __func__);
3133 } else {
3134 j->buffer = (camera3_stream_buffer_t *)malloc(
3135 sizeof(camera3_stream_buffer_t));
3136 *(j->buffer) = *buffer;
3137 CDBG_HIGH("%s: cache buffer %p at result frame_number %d",
3138 __func__, buffer, frame_number);
3139 }
3140 }
3141 }
3142 }
3143 }
3144 }
3145
3146 /*===========================================================================
3147 * FUNCTION : unblockRequestIfNecessary
3148 *
3149 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3150 * that mMutex is held when this function is called.
3151 *
3152 * PARAMETERS :
3153 *
3154 * RETURN :
3155 *
3156 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
    // Unblock process_capture_request
    // Signaling without a waiter is a harmless no-op; caller holds mMutex,
    // so a blocked request thread re-checks its predicate under the lock.
    pthread_cond_signal(&mRequestCond);
}
3162
3163
3164 /*===========================================================================
3165 * FUNCTION : processCaptureRequest
3166 *
3167 * DESCRIPTION: process a capture request from camera service
3168 *
3169 * PARAMETERS :
3170 * @request : request from framework to process
3171 *
3172 * RETURN :
3173 *
3174 *==========================================================================*/
processCaptureRequest(camera3_capture_request_t * request)3175 int QCamera3HardwareInterface::processCaptureRequest(
3176 camera3_capture_request_t *request)
3177 {
3178 ATRACE_CALL();
3179 int rc = NO_ERROR;
3180 int32_t request_id;
3181 CameraMetadata meta;
3182 uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
3183 uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3184 bool isVidBufRequested = false;
3185 camera3_stream_buffer_t *pInputBuffer = NULL;
3186
3187 pthread_mutex_lock(&mMutex);
3188
3189 rc = validateCaptureRequest(request);
3190 if (rc != NO_ERROR) {
3191 ALOGE("%s: incoming request is not valid", __func__);
3192 pthread_mutex_unlock(&mMutex);
3193 return rc;
3194 }
3195
3196 meta = request->settings;
3197
3198 // For first capture request, send capture intent, and
3199 // stream on all streams
3200 if (mFirstRequest) {
3201 // send an unconfigure to the backend so that the isp
3202 // resources are deallocated
3203 if (!mFirstConfiguration) {
3204 cam_stream_size_info_t stream_config_info;
3205 int32_t hal_version = CAM_HAL_V3;
3206 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3207 stream_config_info.buffer_info.min_buffers =
3208 MIN_INFLIGHT_REQUESTS;
3209 stream_config_info.buffer_info.max_buffers =
3210 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3211 clear_metadata_buffer(mParameters);
3212 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3213 CAM_INTF_PARM_HAL_VERSION, hal_version);
3214 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3215 CAM_INTF_META_STREAM_INFO, stream_config_info);
3216 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3217 mParameters);
3218 if (rc < 0) {
3219 ALOGE("%s: set_parms for unconfigure failed", __func__);
3220 pthread_mutex_unlock(&mMutex);
3221 return rc;
3222 }
3223 }
3224 m_perfLock.lock_acq();
3225 /* get eis information for stream configuration */
3226 cam_is_type_t is_type;
3227 char is_type_value[PROPERTY_VALUE_MAX];
3228 property_get("persist.camera.is_type", is_type_value, "0");
3229 is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3230
3231 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3232 int32_t hal_version = CAM_HAL_V3;
3233 uint8_t captureIntent =
3234 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3235 mCaptureIntent = captureIntent;
3236 clear_metadata_buffer(mParameters);
3237 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3238 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3239 }
3240
3241 //If EIS is enabled, turn it on for video
3242 bool setEis = m_bEisEnable && m_bEisSupportedSize;
3243 int32_t vsMode;
3244 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3245 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3246 rc = BAD_VALUE;
3247 }
3248
3249 //IS type will be 0 unless EIS is supported. If EIS is supported
3250 //it could either be 1 or 4 depending on the stream and video size
3251 if (setEis) {
3252 if (!m_bEisSupportedSize) {
3253 is_type = IS_TYPE_DIS;
3254 } else {
3255 is_type = IS_TYPE_EIS_2_0;
3256 }
3257 mStreamConfigInfo.is_type = is_type;
3258 } else {
3259 mStreamConfigInfo.is_type = IS_TYPE_NONE;
3260 }
3261
3262 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3263 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3264 int32_t tintless_value = 1;
3265 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3266 CAM_INTF_PARM_TINTLESS, tintless_value);
3267 //Disable CDS for HFR mode and if mPprocBypass = true.
3268 //CDS is a session parameter in the backend/ISP, so need to be set/reset
3269 //after every configure_stream
3270 if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3271 (m_bIsVideo)) {
3272 int32_t cds = CAM_CDS_MODE_OFF;
3273 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3274 CAM_INTF_PARM_CDS_MODE, cds))
3275 ALOGE("%s: Failed to disable CDS for HFR mode", __func__);
3276
3277 }
3278 setMobicat();
3279
3280 /* Set fps and hfr mode while sending meta stream info so that sensor
3281 * can configure appropriate streaming mode */
3282 mHFRVideoFps = DEFAULT_VIDEO_FPS;
3283 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3284 rc = setHalFpsRange(meta, mParameters);
3285 if (rc != NO_ERROR) {
3286 ALOGE("%s: setHalFpsRange failed", __func__);
3287 }
3288 }
3289 if (meta.exists(ANDROID_CONTROL_MODE)) {
3290 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3291 rc = extractSceneMode(meta, metaMode, mParameters);
3292 if (rc != NO_ERROR) {
3293 ALOGE("%s: extractSceneMode failed", __func__);
3294 }
3295 }
3296
3297 //TODO: validate the arguments, HSV scenemode should have only the
3298 //advertised fps ranges
3299
3300 /*set the capture intent, hal version, tintless, stream info,
3301 *and disenable parameters to the backend*/
3302 CDBG("%s: set_parms META_STREAM_INFO ", __func__ );
3303 mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3304 mParameters);
3305
3306 cam_dimension_t sensor_dim;
3307 memset(&sensor_dim, 0, sizeof(sensor_dim));
3308 rc = getSensorOutputSize(sensor_dim);
3309 if (rc != NO_ERROR) {
3310 ALOGE("%s: Failed to get sensor output size", __func__);
3311 pthread_mutex_unlock(&mMutex);
3312 goto error_exit;
3313 }
3314
3315 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3316 gCamCapability[mCameraId]->active_array_size.height,
3317 sensor_dim.width, sensor_dim.height);
3318
3319 /* Set batchmode before initializing channel. Since registerBuffer
3320 * internally initializes some of the channels, better set batchmode
3321 * even before first register buffer */
3322 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3323 it != mStreamInfo.end(); it++) {
3324 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3325 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3326 && mBatchSize) {
3327 rc = channel->setBatchSize(mBatchSize);
3328 //Disable per frame map unmap for HFR/batchmode case
3329 rc |= channel->setPerFrameMapUnmap(false);
3330 if (NO_ERROR != rc) {
3331 ALOGE("%s : Channel init failed %d", __func__, rc);
3332 pthread_mutex_unlock(&mMutex);
3333 goto error_exit;
3334 }
3335 }
3336 }
3337
3338 //First initialize all streams
3339 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3340 it != mStreamInfo.end(); it++) {
3341 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3342 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3343 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3344 setEis)
3345 rc = channel->initialize(is_type);
3346 else {
3347 rc = channel->initialize(IS_TYPE_NONE);
3348 }
3349 if (NO_ERROR != rc) {
3350 ALOGE("%s : Channel initialization failed %d", __func__, rc);
3351 pthread_mutex_unlock(&mMutex);
3352 goto error_exit;
3353 }
3354 }
3355
3356 if (mRawDumpChannel) {
3357 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3358 if (rc != NO_ERROR) {
3359 ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
3360 pthread_mutex_unlock(&mMutex);
3361 goto error_exit;
3362 }
3363 }
3364 if (mSupportChannel) {
3365 rc = mSupportChannel->initialize(IS_TYPE_NONE);
3366 if (rc < 0) {
3367 ALOGE("%s: Support channel initialization failed", __func__);
3368 pthread_mutex_unlock(&mMutex);
3369 goto error_exit;
3370 }
3371 }
3372 if (mAnalysisChannel) {
3373 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3374 if (rc < 0) {
3375 ALOGE("%s: Analysis channel initialization failed", __func__);
3376 pthread_mutex_unlock(&mMutex);
3377 goto error_exit;
3378 }
3379 }
3380 if (mDummyBatchChannel) {
3381 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3382 if (rc < 0) {
3383 ALOGE("%s: mDummyBatchChannel setBatchSize failed", __func__);
3384 pthread_mutex_unlock(&mMutex);
3385 goto error_exit;
3386 }
3387 rc = mDummyBatchChannel->initialize(is_type);
3388 if (rc < 0) {
3389 ALOGE("%s: mDummyBatchChannel initialization failed", __func__);
3390 pthread_mutex_unlock(&mMutex);
3391 goto error_exit;
3392 }
3393 }
3394
3395 // Set bundle info
3396 rc = setBundleInfo();
3397 if (rc < 0) {
3398 ALOGE("%s: setBundleInfo failed %d", __func__, rc);
3399 pthread_mutex_unlock(&mMutex);
3400 goto error_exit;
3401 }
3402
3403 //Then start them.
3404 CDBG_HIGH("%s: Start META Channel", __func__);
3405 rc = mMetadataChannel->start();
3406 if (rc < 0) {
3407 ALOGE("%s: META channel start failed", __func__);
3408 pthread_mutex_unlock(&mMutex);
3409 goto error_exit;
3410 }
3411
3412 if (mAnalysisChannel) {
3413 rc = mAnalysisChannel->start();
3414 if (rc < 0) {
3415 ALOGE("%s: Analysis channel start failed", __func__);
3416 mMetadataChannel->stop();
3417 pthread_mutex_unlock(&mMutex);
3418 goto error_exit;
3419 }
3420 }
3421
3422 if (mSupportChannel) {
3423 rc = mSupportChannel->start();
3424 if (rc < 0) {
3425 ALOGE("%s: Support channel start failed", __func__);
3426 mMetadataChannel->stop();
3427 /* Although support and analysis are mutually exclusive today
3428 adding it in anycase for future proofing */
3429 if (mAnalysisChannel) {
3430 mAnalysisChannel->stop();
3431 }
3432 pthread_mutex_unlock(&mMutex);
3433 goto error_exit;
3434 }
3435 }
3436 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3437 it != mStreamInfo.end(); it++) {
3438 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3439 CDBG_HIGH("%s: Start Processing Channel mask=%d",
3440 __func__, channel->getStreamTypeMask());
3441 rc = channel->start();
3442 if (rc < 0) {
3443 ALOGE("%s: channel start failed", __func__);
3444 pthread_mutex_unlock(&mMutex);
3445 goto error_exit;
3446 }
3447 }
3448
3449 if (mRawDumpChannel) {
3450 CDBG("%s: Starting raw dump stream",__func__);
3451 rc = mRawDumpChannel->start();
3452 if (rc != NO_ERROR) {
3453 ALOGE("%s: Error Starting Raw Dump Channel", __func__);
3454 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3455 it != mStreamInfo.end(); it++) {
3456 QCamera3Channel *channel =
3457 (QCamera3Channel *)(*it)->stream->priv;
3458 ALOGE("%s: Stopping Processing Channel mask=%d", __func__,
3459 channel->getStreamTypeMask());
3460 channel->stop();
3461 }
3462 if (mSupportChannel)
3463 mSupportChannel->stop();
3464 if (mAnalysisChannel) {
3465 mAnalysisChannel->stop();
3466 }
3467 mMetadataChannel->stop();
3468 pthread_mutex_unlock(&mMutex);
3469 goto error_exit;
3470 }
3471 }
3472
3473 if (mChannelHandle) {
3474
3475 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3476 mChannelHandle);
3477 if (rc != NO_ERROR) {
3478 ALOGE("%s: start_channel failed %d", __func__, rc);
3479 pthread_mutex_unlock(&mMutex);
3480 goto error_exit;
3481 }
3482 }
3483
3484
3485 goto no_error;
3486 error_exit:
3487 m_perfLock.lock_rel();
3488 return rc;
3489 no_error:
3490 m_perfLock.lock_rel();
3491
3492 mWokenUpByDaemon = false;
3493 mPendingLiveRequest = 0;
3494 mFirstConfiguration = false;
3495 enablePowerHint();
3496 }
3497
3498 uint32_t frameNumber = request->frame_number;
3499 cam_stream_ID_t streamID;
3500
3501 if (meta.exists(ANDROID_REQUEST_ID)) {
3502 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3503 mCurrentRequestId = request_id;
3504 CDBG("%s: Received request with id: %d",__func__, request_id);
3505 } else if (mFirstRequest || mCurrentRequestId == -1){
3506 ALOGE("%s: Unable to find request id field, \
3507 & no previous id available", __func__);
3508 pthread_mutex_unlock(&mMutex);
3509 return NAME_NOT_FOUND;
3510 } else {
3511 CDBG("%s: Re-using old request id", __func__);
3512 request_id = mCurrentRequestId;
3513 }
3514
3515 CDBG_HIGH("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
3516 __func__, __LINE__,
3517 request->num_output_buffers,
3518 request->input_buffer,
3519 frameNumber);
3520 // Acquire all request buffers first
3521 streamID.num_streams = 0;
3522 int blob_request = 0;
3523 uint32_t snapshotStreamId = 0;
3524 for (size_t i = 0; i < request->num_output_buffers; i++) {
3525 const camera3_stream_buffer_t& output = request->output_buffers[i];
3526 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3527
3528 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3529 //Call function to store local copy of jpeg data for encode params.
3530 blob_request = 1;
3531 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3532 }
3533
3534 if (output.acquire_fence != -1) {
3535 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3536 close(output.acquire_fence);
3537 if (rc != OK) {
3538 ALOGE("%s: sync wait failed %d", __func__, rc);
3539 pthread_mutex_unlock(&mMutex);
3540 return rc;
3541 }
3542 }
3543
3544 streamID.streamID[streamID.num_streams] =
3545 channel->getStreamID(channel->getStreamTypeMask());
3546 streamID.num_streams++;
3547
3548 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3549 isVidBufRequested = true;
3550 }
3551 }
3552
3553 if (blob_request && mRawDumpChannel) {
3554 CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
3555 streamID.streamID[streamID.num_streams] =
3556 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3557 streamID.num_streams++;
3558 }
3559
3560 if(request->input_buffer == NULL) {
3561 /* Parse the settings:
3562 * - For every request in NORMAL MODE
3563 * - For every request in HFR mode during preview only case
3564 * - For first request of every batch in HFR mode during video
3565 * recording. In batchmode the same settings except frame number is
3566 * repeated in each request of the batch.
3567 */
3568 if (!mBatchSize ||
3569 (mBatchSize && !isVidBufRequested) ||
3570 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3571 rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
3572 if (rc < 0) {
3573 ALOGE("%s: fail to set frame parameters", __func__);
3574 pthread_mutex_unlock(&mMutex);
3575 return rc;
3576 }
3577 }
3578 /* For batchMode HFR, setFrameParameters is not called for every
3579 * request. But only frame number of the latest request is parsed.
3580 * Keep track of first and last frame numbers in a batch so that
3581 * metadata for the frame numbers of batch can be duplicated in
3582 * handleBatchMetadta */
3583 if (mBatchSize) {
3584 if (!mToBeQueuedVidBufs) {
3585 //start of the batch
3586 mFirstFrameNumberInBatch = request->frame_number;
3587 }
3588 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3589 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3590 ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3591 return BAD_VALUE;
3592 }
3593 }
3594 if (mNeedSensorRestart) {
3595 /* Unlock the mutex as restartSensor waits on the channels to be
3596 * stopped, which in turn calls stream callback functions -
3597 * handleBufferWithLock and handleMetadataWithLock */
3598 pthread_mutex_unlock(&mMutex);
3599 rc = dynamicUpdateMetaStreamInfo();
3600 if (rc != NO_ERROR) {
3601 ALOGE("%s: Restarting the sensor failed", __func__);
3602 return BAD_VALUE;
3603 }
3604 mNeedSensorRestart = false;
3605 pthread_mutex_lock(&mMutex);
3606 }
3607 } else {
3608
3609 if (request->input_buffer->acquire_fence != -1) {
3610 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3611 close(request->input_buffer->acquire_fence);
3612 if (rc != OK) {
3613 ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
3614 pthread_mutex_unlock(&mMutex);
3615 return rc;
3616 }
3617 }
3618 }
3619
3620 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
3621 mLastCustIntentFrmNum = frameNumber;
3622 }
3623 /* Update pending request list and pending buffers map */
3624 PendingRequestInfo pendingRequest;
3625 pendingRequestIterator latestRequest;
3626 pendingRequest.frame_number = frameNumber;
3627 pendingRequest.num_buffers = request->num_output_buffers;
3628 pendingRequest.request_id = request_id;
3629 pendingRequest.blob_request = blob_request;
3630 pendingRequest.timestamp = 0;
3631 pendingRequest.bUrgentReceived = 0;
3632 if (request->input_buffer) {
3633 pendingRequest.input_buffer =
3634 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
3635 *(pendingRequest.input_buffer) = *(request->input_buffer);
3636 pInputBuffer = pendingRequest.input_buffer;
3637 } else {
3638 pendingRequest.input_buffer = NULL;
3639 pInputBuffer = NULL;
3640 }
3641
3642 pendingRequest.pipeline_depth = 0;
3643 pendingRequest.partial_result_cnt = 0;
3644 extractJpegMetadata(mCurJpegMeta, request);
3645 pendingRequest.jpegMetadata = mCurJpegMeta;
3646 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
3647 pendingRequest.shutter_notified = false;
3648 pendingRequest.need_dynamic_blklvl = false;
3649 pendingRequest.pending_extra_result = false;
3650
3651 //extract capture intent
3652 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3653 mCaptureIntent =
3654 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3655 }
3656 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
3657 mHybridAeEnable =
3658 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
3659 }
3660 pendingRequest.capture_intent = mCaptureIntent;
3661 pendingRequest.hybrid_ae_enable = mHybridAeEnable;
3662
3663 for (size_t i = 0; i < request->num_output_buffers; i++) {
3664 RequestedBufferInfo requestedBuf;
3665 memset(&requestedBuf, 0, sizeof(requestedBuf));
3666 requestedBuf.stream = request->output_buffers[i].stream;
3667 requestedBuf.buffer = NULL;
3668 pendingRequest.buffers.push_back(requestedBuf);
3669
3670 // Add to buffer handle the pending buffers list
3671 PendingBufferInfo bufferInfo;
3672 bufferInfo.frame_number = frameNumber;
3673 bufferInfo.buffer = request->output_buffers[i].buffer;
3674 bufferInfo.stream = request->output_buffers[i].stream;
3675 mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
3676 mPendingBuffersMap.num_buffers++;
3677 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
3678 CDBG("%s: frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
3679 __func__, frameNumber, bufferInfo.buffer,
3680 channel->getStreamTypeMask(), bufferInfo.stream->format);
3681
3682 if (bufferInfo.stream->format == HAL_PIXEL_FORMAT_RAW16) {
3683 if (gCamCapability[mCameraId]->optical_black_region_count > 0) {
3684 CDBG("%s, frame_number:%d, need dynamic blacklevel", __func__, frameNumber);
3685 pendingRequest.need_dynamic_blklvl = true;
3686 }
3687 }
3688 }
3689 mPendingBuffersMap.last_frame_number = frameNumber;
3690 latestRequest = mPendingRequestsList.insert(
3691 mPendingRequestsList.end(), pendingRequest);
3692 if(mFlush) {
3693 pthread_mutex_unlock(&mMutex);
3694 return NO_ERROR;
3695 }
3696
3697 // Notify metadata channel we receive a request
3698 mMetadataChannel->request(NULL, frameNumber);
3699
3700 if(request->input_buffer != NULL){
3701 CDBG("%s: Input request, frame_number %d", __func__, frameNumber);
3702 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
3703 if (NO_ERROR != rc) {
3704 ALOGE("%s: fail to set reproc parameters", __func__);
3705 pthread_mutex_unlock(&mMutex);
3706 return rc;
3707 }
3708 }
3709
3710 // Call request on other streams
3711 uint32_t streams_need_metadata = 0;
3712 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
3713 for (size_t i = 0; i < request->num_output_buffers; i++) {
3714 const camera3_stream_buffer_t& output = request->output_buffers[i];
3715 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3716
3717 if (channel == NULL) {
3718 ALOGE("%s: invalid channel pointer for stream", __func__);
3719 continue;
3720 }
3721
3722 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3723 if(request->input_buffer != NULL){
3724 rc = channel->request(output.buffer, frameNumber,
3725 pInputBuffer, &mReprocMeta);
3726 if (rc < 0) {
3727 ALOGE("%s: Fail to request on picture channel", __func__);
3728 pthread_mutex_unlock(&mMutex);
3729 return rc;
3730 }
3731 } else {
3732 CDBG("%s: %d, snapshot request with buffer %p, frame_number %d", __func__,
3733 __LINE__, output.buffer, frameNumber);
3734 if (!request->settings) {
3735 rc = channel->request(output.buffer, frameNumber,
3736 NULL, mPrevParameters);
3737 } else {
3738 rc = channel->request(output.buffer, frameNumber,
3739 NULL, mParameters);
3740 }
3741 if (rc < 0) {
3742 ALOGE("%s: Fail to request on picture channel", __func__);
3743 pthread_mutex_unlock(&mMutex);
3744 return rc;
3745 }
3746 pendingBufferIter->need_metadata = true;
3747 streams_need_metadata++;
3748 }
3749 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
3750 bool needMetadata = false;
3751 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
3752 rc = yuvChannel->request(output.buffer, frameNumber,
3753 pInputBuffer,
3754 (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
3755 if (rc < 0) {
3756 ALOGE("%s: Fail to request on YUV channel", __func__);
3757 pthread_mutex_unlock(&mMutex);
3758 return rc;
3759 }
3760 pendingBufferIter->need_metadata = needMetadata;
3761 if (needMetadata)
3762 streams_need_metadata += 1;
3763 CDBG("%s: calling YUV channel request, need_metadata is %d",
3764 __func__, needMetadata);
3765 } else {
3766 CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
3767 __LINE__, output.buffer, frameNumber);
3768 rc = channel->request(output.buffer, frameNumber);
3769 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3770 && mBatchSize) {
3771 mToBeQueuedVidBufs++;
3772 if (mToBeQueuedVidBufs == mBatchSize) {
3773 channel->queueBatchBuf();
3774 }
3775 }
3776 if (rc < 0) {
3777 ALOGE("%s: request failed", __func__);
3778 pthread_mutex_unlock(&mMutex);
3779 return rc;
3780 }
3781 }
3782 pendingBufferIter++;
3783 }
3784
3785 //If 2 streams have need_metadata set to true, fail the request, unless
3786 //we copy/reference count the metadata buffer
3787 if (streams_need_metadata > 1) {
3788 ALOGE("%s: not supporting request in which two streams requires"
3789 " 2 HAL metadata for reprocessing", __func__);
3790 pthread_mutex_unlock(&mMutex);
3791 return -EINVAL;
3792 }
3793
3794 if(request->input_buffer == NULL) {
3795 /* Set the parameters to backend:
3796 * - For every request in NORMAL MODE
3797 * - For every request in HFR mode during preview only case
3798 * - Once every batch in HFR mode during video recording
3799 */
3800 if (!mBatchSize ||
3801 (mBatchSize && !isVidBufRequested) ||
3802 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
3803 CDBG("%s: set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
3804 __func__, mBatchSize, isVidBufRequested,
3805 mToBeQueuedVidBufs);
3806 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3807 mParameters);
3808 if (rc < 0) {
3809 ALOGE("%s: set_parms failed", __func__);
3810 }
3811 /* reset to zero coz, the batch is queued */
3812 mToBeQueuedVidBufs = 0;
3813 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
3814 }
3815 mPendingLiveRequest++;
3816 }
3817
3818 CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
3819
3820 mFirstRequest = false;
3821 // Added a timed condition wait
3822 struct timespec ts;
3823 uint8_t isValidTimeout = 1;
3824 rc = clock_gettime(CLOCK_REALTIME, &ts);
3825 if (rc < 0) {
3826 isValidTimeout = 0;
3827 ALOGE("%s: Error reading the real time clock!!", __func__);
3828 }
3829 else {
3830 // Make timeout as 5 sec for request to be honored
3831 ts.tv_sec += 5;
3832 }
3833 //Block on conditional variable
3834 if (mBatchSize) {
3835 /* For HFR, more buffers are dequeued upfront to improve the performance */
3836 minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3837 maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3838 }
3839 while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer) {
3840 if (!isValidTimeout) {
3841 CDBG("%s: Blocking on conditional wait", __func__);
3842 pthread_cond_wait(&mRequestCond, &mMutex);
3843 }
3844 else {
3845 CDBG("%s: Blocking on timed conditional wait", __func__);
3846 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
3847 if (rc == ETIMEDOUT) {
3848 rc = -ENODEV;
3849 ALOGE("%s: Unblocked on timeout!!!!", __func__);
3850 break;
3851 }
3852 }
3853 CDBG("%s: Unblocked", __func__);
3854 if (mWokenUpByDaemon) {
3855 mWokenUpByDaemon = false;
3856 if (mPendingLiveRequest < maxInFlightRequests)
3857 break;
3858 }
3859 }
3860 pthread_mutex_unlock(&mMutex);
3861
3862 return rc;
3863 }
3864
3865 /*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION: Dump the pending request list, the pending buffer map and the
 *              pending frame-drop list to the given file descriptor
 *              (dumpsys hook).
 *
 * PARAMETERS :
 *   @fd : file descriptor to write the dump output to
 *
 * RETURN     : None
3874 *==========================================================================*/
dump(int fd)3875 void QCamera3HardwareInterface::dump(int fd)
3876 {
3877 pthread_mutex_lock(&mMutex);
3878 dprintf(fd, "\n Camera HAL3 information Begin \n");
3879
3880 dprintf(fd, "\nNumber of pending requests: %zu \n",
3881 mPendingRequestsList.size());
3882 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3883 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
3884 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3885 for(pendingRequestIterator i = mPendingRequestsList.begin();
3886 i != mPendingRequestsList.end(); i++) {
3887 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
3888 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
3889 i->input_buffer);
3890 }
3891 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
3892 mPendingBuffersMap.num_buffers);
3893 dprintf(fd, "-------+------------------\n");
3894 dprintf(fd, " Frame | Stream type mask \n");
3895 dprintf(fd, "-------+------------------\n");
3896 for(List<PendingBufferInfo>::iterator i =
3897 mPendingBuffersMap.mPendingBufferList.begin();
3898 i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
3899 QCamera3Channel *channel = (QCamera3Channel *)(i->stream->priv);
3900 dprintf(fd, " %5d | %11d \n",
3901 i->frame_number, channel->getStreamTypeMask());
3902 }
3903 dprintf(fd, "-------+------------------\n");
3904
3905 dprintf(fd, "\nPending frame drop list: %zu\n",
3906 mPendingFrameDropList.size());
3907 dprintf(fd, "-------+-----------\n");
3908 dprintf(fd, " Frame | Stream ID \n");
3909 dprintf(fd, "-------+-----------\n");
3910 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
3911 i != mPendingFrameDropList.end(); i++) {
3912 dprintf(fd, " %5d | %9d \n",
3913 i->frame_number, i->stream_ID);
3914 }
3915 dprintf(fd, "-------+-----------\n");
3916
3917 dprintf(fd, "\n Camera HAL3 information End \n");
3918
3919 /* use dumpsys media.camera as trigger to send update debug level event */
3920 mUpdateDebugLevel = true;
3921 pthread_mutex_unlock(&mMutex);
3922 return;
3923 }
3924
3925 /*===========================================================================
 * FUNCTION   : flush
 *
 * DESCRIPTION: Stop all channels, return errors for every in-flight request
 *              and buffer, then restart the channels so streaming can resume
 *              with fresh requests.
 *
 * PARAMETERS : None
 *
 * RETURN     : 0 on success, negative error code on failure
3934 *==========================================================================*/
flush()3935 int QCamera3HardwareInterface::flush()
3936 {
3937 ATRACE_CALL();
3938 int32_t rc = NO_ERROR;
3939
3940 CDBG("%s: Unblocking Process Capture Request", __func__);
3941 pthread_mutex_lock(&mMutex);
3942
3943 if (mFirstRequest) {
3944 pthread_mutex_unlock(&mMutex);
3945 return NO_ERROR;
3946 }
3947
3948 mFlush = true;
3949 pthread_mutex_unlock(&mMutex);
3950
3951 rc = stopAllChannels();
3952 if (rc < 0) {
3953 ALOGE("%s: stopAllChannels failed", __func__);
3954 return rc;
3955 }
3956 if (mChannelHandle) {
3957 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
3958 mChannelHandle);
3959 }
3960
3961 // Reset bundle info
3962 rc = setBundleInfo();
3963 if (rc < 0) {
3964 ALOGE("%s: setBundleInfo failed %d", __func__, rc);
3965 return rc;
3966 }
3967
3968 // Mutex Lock
3969 pthread_mutex_lock(&mMutex);
3970
3971 // Unblock process_capture_request
3972 mPendingLiveRequest = 0;
3973 pthread_cond_signal(&mRequestCond);
3974
3975 rc = notifyErrorForPendingRequests();
3976 if (rc < 0) {
3977 ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
3978 pthread_mutex_unlock(&mMutex);
3979 return rc;
3980 }
3981
3982 mFlush = false;
3983
3984 // Start the Streams/Channels
3985 rc = startAllChannels();
3986 if (rc < 0) {
3987 ALOGE("%s: startAllChannels failed", __func__);
3988 pthread_mutex_unlock(&mMutex);
3989 return rc;
3990 }
3991
3992 if (mChannelHandle) {
3993 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3994 mChannelHandle);
3995 if (rc < 0) {
3996 ALOGE("%s: start_channel failed", __func__);
3997 pthread_mutex_unlock(&mMutex);
3998 return rc;
3999 }
4000 }
4001
4002 pthread_mutex_unlock(&mMutex);
4003
4004 return 0;
4005 }
4006
4007 /*===========================================================================
4008 * FUNCTION : captureResultCb
4009 *
4010 * DESCRIPTION: Callback handler for all capture result
4011 * (streams, as well as metadata)
4012 *
4013 * PARAMETERS :
4014 * @metadata : metadata information
4015 * @buffer : actual gralloc buffer to be returned to frameworks.
4016 * NULL if metadata.
4017 *
4018 * RETURN : NONE
4019 *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata_buf,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer)4020 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4021 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4022 {
4023 if (metadata_buf) {
4024 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
4025 handleBatchMetadata(metadata_buf,
4026 true /* free_and_bufdone_meta_buf */);
4027 } else { /* mBatchSize = 0 */
4028 hdrPlusPerfLock(metadata_buf);
4029 pthread_mutex_lock(&mMutex);
4030 handleMetadataWithLock(metadata_buf,
4031 true /* free_and_bufdone_meta_buf */,
4032 false /* first frame of batch metadata */ );
4033 pthread_mutex_unlock(&mMutex);
4034 }
4035 } else if (isInputBuffer) {
4036 pthread_mutex_lock(&mMutex);
4037 handleInputBufferWithLock(frame_number);
4038 pthread_mutex_unlock(&mMutex);
4039 } else {
4040 pthread_mutex_lock(&mMutex);
4041 handleBufferWithLock(buffer, frame_number);
4042 pthread_mutex_unlock(&mMutex);
4043 }
4044 return;
4045 }
4046
4047 /*===========================================================================
4048 * FUNCTION : getReprocessibleOutputStreamId
4049 *
4050 * DESCRIPTION: Get source output stream id for the input reprocess stream
4051 * based on size and format, which would be the largest
4052 * output stream if an input stream exists.
4053 *
4054 * PARAMETERS :
4055 * @id : return the stream id if found
4056 *
4057 * RETURN : int32_t type of status
4058 * NO_ERROR -- success
4059 * none-zero failure code
4060 *==========================================================================*/
getReprocessibleOutputStreamId(uint32_t & id)4061 int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4062 {
4063 stream_info_t* stream = NULL;
4064
4065 /* check if any output or bidirectional stream with the same size and format
4066 and return that stream */
4067 if ((mInputStreamInfo.dim.width > 0) &&
4068 (mInputStreamInfo.dim.height > 0)) {
4069 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4070 it != mStreamInfo.end(); it++) {
4071
4072 camera3_stream_t *stream = (*it)->stream;
4073 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4074 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4075 (stream->format == mInputStreamInfo.format)) {
4076 // Usage flag for an input stream and the source output stream
4077 // may be different.
4078 CDBG("%s: Found reprocessible output stream! %p", __func__, *it);
4079 CDBG("%s: input stream usage 0x%x, current stream usage 0x%x",
4080 __func__, stream->usage, mInputStreamInfo.usage);
4081
4082 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4083 if (channel != NULL && channel->mStreams[0]) {
4084 id = channel->mStreams[0]->getMyServerID();
4085 return NO_ERROR;
4086 }
4087 }
4088 }
4089 } else {
4090 CDBG("%s: No input stream, so no reprocessible output stream", __func__);
4091 }
4092 return NAME_NOT_FOUND;
4093 }
4094
4095 /*===========================================================================
4096 * FUNCTION : lookupFwkName
4097 *
4098 * DESCRIPTION: In case the enum is not same in fwk and backend
4099 * make sure the parameter is correctly propogated
4100 *
4101 * PARAMETERS :
4102 * @arr : map between the two enums
4103 * @len : len of the map
4104 * @hal_name : name of the hal_parm to map
4105 *
4106 * RETURN : int type of status
4107 * fwk_name -- success
4108 * none-zero failure code
4109 *==========================================================================*/
lookupFwkName(const mapType * arr,size_t len,halType hal_name)4110 template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4111 size_t len, halType hal_name)
4112 {
4113
4114 for (size_t i = 0; i < len; i++) {
4115 if (arr[i].hal_name == hal_name) {
4116 return arr[i].fwk_name;
4117 }
4118 }
4119
4120 /* Not able to find matching framework type is not necessarily
4121 * an error case. This happens when mm-camera supports more attributes
4122 * than the frameworks do */
4123 CDBG_HIGH("%s: Cannot find matching framework type", __func__);
4124 return NAME_NOT_FOUND;
4125 }
4126
4127 /*===========================================================================
4128 * FUNCTION : lookupHalName
4129 *
4130 * DESCRIPTION: In case the enum is not same in fwk and backend
4131 * make sure the parameter is correctly propogated
4132 *
4133 * PARAMETERS :
4134 * @arr : map between the two enums
4135 * @len : len of the map
4136 * @fwk_name : name of the hal_parm to map
4137 *
4138 * RETURN : int32_t type of status
4139 * hal_name -- success
4140 * none-zero failure code
4141 *==========================================================================*/
lookupHalName(const mapType * arr,size_t len,fwkType fwk_name)4142 template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
4143 size_t len, fwkType fwk_name)
4144 {
4145 for (size_t i = 0; i < len; i++) {
4146 if (arr[i].fwk_name == fwk_name) {
4147 return arr[i].hal_name;
4148 }
4149 }
4150
4151 ALOGE("%s: Cannot find matching hal type fwk_name=%d", __func__, fwk_name);
4152 return NAME_NOT_FOUND;
4153 }
4154
4155 /*===========================================================================
4156 * FUNCTION : lookupProp
4157 *
4158 * DESCRIPTION: lookup a value by its name
4159 *
4160 * PARAMETERS :
4161 * @arr : map between the two enums
4162 * @len : size of the map
4163 * @name : name to be looked up
4164 *
4165 * RETURN : Value if found
4166 * CAM_CDS_MODE_MAX if not found
4167 *==========================================================================*/
lookupProp(const mapType * arr,size_t len,const char * name)4168 template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4169 size_t len, const char *name)
4170 {
4171 if (name) {
4172 for (size_t i = 0; i < len; i++) {
4173 if (!strcmp(arr[i].desc, name)) {
4174 return arr[i].val;
4175 }
4176 }
4177 }
4178 return CAM_CDS_MODE_MAX;
4179 }
4180
4181 /*===========================================================================
4182 *
4183 * DESCRIPTION:
4184 *
4185 * PARAMETERS :
4186 * @metadata : metadata information from callback
4187 * @timestamp: metadata buffer timestamp
4188 * @request_id: request id
4189 * @hybrid_ae_enable: whether hybrid ae is enabled
4190 * @jpegMetadata: additional jpeg metadata
4191 * @pprocDone: whether internal offline postprocsesing is done
4192 *
4193 * RETURN : camera_metadata_t*
4194 * metadata in a format specified by fwk
4195 *==========================================================================*/
4196 camera_metadata_t*
translateFromHalMetadata(metadata_buffer_t * metadata,nsecs_t timestamp,int32_t request_id,const CameraMetadata & jpegMetadata,uint8_t pipeline_depth,uint8_t capture_intent,uint8_t hybrid_ae_enable,bool pprocDone,bool dynamic_blklvl,bool firstMetadataInBatch)4197 QCamera3HardwareInterface::translateFromHalMetadata(
4198 metadata_buffer_t *metadata,
4199 nsecs_t timestamp,
4200 int32_t request_id,
4201 const CameraMetadata& jpegMetadata,
4202 uint8_t pipeline_depth,
4203 uint8_t capture_intent,
4204 uint8_t hybrid_ae_enable,
4205 bool pprocDone,
4206 bool dynamic_blklvl,
4207 bool firstMetadataInBatch)
4208 {
4209 CameraMetadata camMetadata;
4210 camera_metadata_t *resultMetadata;
4211
4212 if (mBatchSize && !firstMetadataInBatch) {
4213 /* In batch mode, use cached metadata from the first metadata
4214 in the batch */
4215 camMetadata.clear();
4216 camMetadata = mCachedMetadata;
4217 }
4218
4219 if (jpegMetadata.entryCount())
4220 camMetadata.append(jpegMetadata);
4221
4222 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, ×tamp, 1);
4223 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4224 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4225 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4226 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
4227
4228 if (mBatchSize && !firstMetadataInBatch) {
4229 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
4230 resultMetadata = camMetadata.release();
4231 return resultMetadata;
4232 }
4233
4234 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
4235 int64_t fwk_frame_number = *frame_number;
4236 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
4237 }
4238
4239 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
4240 int32_t fps_range[2];
4241 fps_range[0] = (int32_t)float_range->min_fps;
4242 fps_range[1] = (int32_t)float_range->max_fps;
4243 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
4244 fps_range, 2);
4245 CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
4246 __func__, fps_range[0], fps_range[1]);
4247 }
4248
4249 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
4250 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
4251 }
4252
4253 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4254 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
4255 METADATA_MAP_SIZE(SCENE_MODES_MAP),
4256 *sceneMode);
4257 if (NAME_NOT_FOUND != val) {
4258 uint8_t fwkSceneMode = (uint8_t)val;
4259 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
4260 CDBG("%s: urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
4261 __func__, fwkSceneMode);
4262 }
4263 }
4264
4265 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
4266 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
4267 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
4268 }
4269
4270 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
4271 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
4272 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
4273 }
4274
4275 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
4276 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
4277 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
4278 }
4279
4280 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
4281 CAM_INTF_META_EDGE_MODE, metadata) {
4282 uint8_t edgeStrength = (uint8_t) edgeApplication->sharpness;
4283 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
4284 }
4285
4286 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
4287 uint8_t fwk_flashPower = (uint8_t) *flashPower;
4288 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
4289 }
4290
4291 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
4292 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
4293 }
4294
4295 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
4296 if (0 <= *flashState) {
4297 uint8_t fwk_flashState = (uint8_t) *flashState;
4298 if (!gCamCapability[mCameraId]->flash_available) {
4299 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
4300 }
4301 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
4302 }
4303 }
4304
4305 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
4306 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
4307 if (NAME_NOT_FOUND != val) {
4308 uint8_t fwk_flashMode = (uint8_t)val;
4309 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
4310 }
4311 }
4312
4313 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
4314 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
4315 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
4316 }
4317
4318 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
4319 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
4320 }
4321
4322 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
4323 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
4324 }
4325
4326 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
4327 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
4328 }
4329
4330 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
4331 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
4332 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
4333 }
4334
4335 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
4336 uint8_t fwk_videoStab = (uint8_t) *videoStab;
4337 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
4338 }
4339
4340 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
4341 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
4342 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
4343 }
4344
4345 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
4346 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
4347 }
4348
4349 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
4350 CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
4351
4352 CDBG("%s: dynamicblackLevel = %f %f %f %f", __func__,
4353 blackLevelSourcePattern->cam_black_level[0],
4354 blackLevelSourcePattern->cam_black_level[1],
4355 blackLevelSourcePattern->cam_black_level[2],
4356 blackLevelSourcePattern->cam_black_level[3]);
4357 }
4358
4359 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
4360 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
4361 float fwk_blackLevelInd[4];
4362
4363 fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
4364 fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
4365 fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
4366 fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
4367
4368 CDBG("%s: applied dynamicblackLevel = %f %f %f %f", __func__,
4369 blackLevelAppliedPattern->cam_black_level[0],
4370 blackLevelAppliedPattern->cam_black_level[1],
4371 blackLevelAppliedPattern->cam_black_level[2],
4372 blackLevelAppliedPattern->cam_black_level[3]);
4373 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
4374 camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4375
                // if dynamic_blklvl is true, we calculate the black level from the raw
                // callback; otherwise, use the value from the linearization LUT.
4378 if (dynamic_blklvl == false) {
                    // Need to convert from the internal 16-bit depth to the
                    // sensor's 10-bit raw depth space.
4381 fwk_blackLevelInd[0] /= 64.0;
4382 fwk_blackLevelInd[1] /= 64.0;
4383 fwk_blackLevelInd[2] /= 64.0;
4384 fwk_blackLevelInd[3] /= 64.0;
4385 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4386 }
4387 }
4388
4389 // Fixed whitelevel is used by ISP/Sensor
4390 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
4391 &gCamCapability[mCameraId]->white_level, 1);
4392
4393 if (gCamCapability[mCameraId]->optical_black_region_count != 0 &&
4394 gCamCapability[mCameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
4395 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
4396 for (size_t i = 0; i < gCamCapability[mCameraId]->optical_black_region_count * 4; i++) {
4397 opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
4398 }
4399 camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_INFO_OPTICALLY_SHIELDED_REGIONS,
4400 opticalBlackRegions, gCamCapability[mCameraId]->optical_black_region_count * 4);
4401 }
4402
4403 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
4404 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
4405 int32_t scalerCropRegion[4];
4406 scalerCropRegion[0] = hScalerCropRegion->left;
4407 scalerCropRegion[1] = hScalerCropRegion->top;
4408 scalerCropRegion[2] = hScalerCropRegion->width;
4409 scalerCropRegion[3] = hScalerCropRegion->height;
4410
4411 // Adjust crop region from sensor output coordinate system to active
4412 // array coordinate system.
4413 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
4414 scalerCropRegion[2], scalerCropRegion[3]);
4415
4416 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
4417 }
4418
4419 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
4420 CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
4421 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
4422 }
4423
4424 IF_META_AVAILABLE(int64_t, sensorFameDuration,
4425 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
4426 CDBG("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
4427 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
4428 }
4429
4430 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
4431 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
4432 CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
4433 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
4434 sensorRollingShutterSkew, 1);
4435 }
4436
4437 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
4438 CDBG("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
4439 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
4440
4441 //calculate the noise profile based on sensitivity
4442 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
4443 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
4444 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
4445 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
4446 noise_profile[i] = noise_profile_S;
4447 noise_profile[i+1] = noise_profile_O;
4448 }
4449 CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
4450 noise_profile_S, noise_profile_O);
4451 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
4452 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
4453 }
4454
4455 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
4456 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
4457 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
4458 }
4459
4460 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
4461 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
4462 *faceDetectMode);
4463 if (NAME_NOT_FOUND != val) {
4464 uint8_t fwk_faceDetectMode = (uint8_t)val;
4465 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
4466
4467 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4468 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
4469 CAM_INTF_META_FACE_DETECTION, metadata) {
4470 uint8_t numFaces = MIN(
4471 faceDetectionInfo->num_faces_detected, MAX_ROI);
4472 int32_t faceIds[MAX_ROI];
4473 uint8_t faceScores[MAX_ROI];
4474 int32_t faceRectangles[MAX_ROI * 4];
4475 int32_t faceLandmarks[MAX_ROI * 6];
4476 size_t j = 0, k = 0;
4477
4478 for (size_t i = 0; i < numFaces; i++) {
4479 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
4480 // Adjust crop region from sensor output coordinate system to active
4481 // array coordinate system.
4482 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
4483 mCropRegionMapper.toActiveArray(rect.left, rect.top,
4484 rect.width, rect.height);
4485
4486 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
4487 faceRectangles+j, -1);
4488
4489 // Map the co-ordinate sensor output coordinate system to active
4490 // array coordinate system.
4491 cam_face_detection_info_t& face = faceDetectionInfo->faces[i];
4492 mCropRegionMapper.toActiveArray(face.left_eye_center.x,
4493 face.left_eye_center.y);
4494 mCropRegionMapper.toActiveArray(face.right_eye_center.x,
4495 face.right_eye_center.y);
4496 mCropRegionMapper.toActiveArray(face.mouth_center.x,
4497 face.mouth_center.y);
4498
4499 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
4500 j+= 4;
4501 k+= 6;
4502 }
4503 if (numFaces <= 0) {
4504 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
4505 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
4506 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
4507 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
4508 }
4509
4510 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
4511 numFaces);
4512 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
4513 faceRectangles, numFaces * 4U);
4514 if (fwk_faceDetectMode ==
4515 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
4516 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
4517 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
4518 faceLandmarks, numFaces * 6U);
4519 }
4520 }
4521 }
4522 }
4523 }
4524
4525 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
4526 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
4527 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
4528 }
4529
4530 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
4531 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
4532 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
4533 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
4534 }
4535
4536 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
4537 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
4538 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
4539 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
4540 }
4541
4542 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
4543 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
4544 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
4545 CAM_MAX_SHADING_MAP_HEIGHT);
4546 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
4547 CAM_MAX_SHADING_MAP_WIDTH);
4548 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
4549 lensShadingMap->lens_shading, 4U * map_width * map_height);
4550 }
4551
4552 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
4553 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
4554 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
4555 }
4556
4557 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
4558 //Populate CAM_INTF_META_TONEMAP_CURVES
4559 /* ch0 = G, ch 1 = B, ch 2 = R*/
4560 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4561 ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4562 __func__, tonemap->tonemap_points_cnt,
4563 CAM_MAX_TONEMAP_CURVE_SIZE);
4564 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4565 }
4566
4567 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
4568 &tonemap->curves[0].tonemap_points[0][0],
4569 tonemap->tonemap_points_cnt * 2);
4570
4571 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
4572 &tonemap->curves[1].tonemap_points[0][0],
4573 tonemap->tonemap_points_cnt * 2);
4574
4575 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
4576 &tonemap->curves[2].tonemap_points[0][0],
4577 tonemap->tonemap_points_cnt * 2);
4578 }
4579
4580 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
4581 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
4582 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
4583 CC_GAINS_COUNT);
4584 }
4585
4586 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
4587 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
4588 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
4589 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
4590 CC_MATRIX_COLS * CC_MATRIX_ROWS);
4591 }
4592
4593 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
4594 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
4595 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4596 ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4597 __func__, toneCurve->tonemap_points_cnt,
4598 CAM_MAX_TONEMAP_CURVE_SIZE);
4599 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4600 }
4601 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
4602 (float*)toneCurve->curve.tonemap_points,
4603 toneCurve->tonemap_points_cnt * 2);
4604 }
4605
4606 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
4607 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
4608 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
4609 predColorCorrectionGains->gains, 4);
4610 }
4611
4612 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
4613 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
4614 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4615 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
4616 CC_MATRIX_ROWS * CC_MATRIX_COLS);
4617 }
4618
4619 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
4620 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
4621 }
4622
4623 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
4624 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
4625 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
4626 }
4627
4628 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
4629 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
4630 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
4631 }
4632
4633 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
4634 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
4635 *effectMode);
4636 if (NAME_NOT_FOUND != val) {
4637 uint8_t fwk_effectMode = (uint8_t)val;
4638 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
4639 }
4640 }
4641
4642 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
4643 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
4644 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
4645 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
4646 if (NAME_NOT_FOUND != fwk_testPatternMode) {
4647 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
4648 }
4649 int32_t fwk_testPatternData[4];
4650 fwk_testPatternData[0] = testPatternData->r;
4651 fwk_testPatternData[3] = testPatternData->b;
4652 switch (gCamCapability[mCameraId]->color_arrangement) {
4653 case CAM_FILTER_ARRANGEMENT_RGGB:
4654 case CAM_FILTER_ARRANGEMENT_GRBG:
4655 fwk_testPatternData[1] = testPatternData->gr;
4656 fwk_testPatternData[2] = testPatternData->gb;
4657 break;
4658 case CAM_FILTER_ARRANGEMENT_GBRG:
4659 case CAM_FILTER_ARRANGEMENT_BGGR:
4660 fwk_testPatternData[2] = testPatternData->gr;
4661 fwk_testPatternData[1] = testPatternData->gb;
4662 break;
4663 default:
4664 ALOGE("%s: color arrangement %d is not supported", __func__,
4665 gCamCapability[mCameraId]->color_arrangement);
4666 break;
4667 }
4668 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
4669 }
4670
4671 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
4672 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
4673 }
4674
4675 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
4676 String8 str((const char *)gps_methods);
4677 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
4678 }
4679
4680 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
4681 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
4682 }
4683
4684 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
4685 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
4686 }
4687
4688 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
4689 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
4690 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
4691 }
4692
4693 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
4694 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
4695 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
4696 }
4697
4698 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
4699 int32_t fwk_thumb_size[2];
4700 fwk_thumb_size[0] = thumb_size->width;
4701 fwk_thumb_size[1] = thumb_size->height;
4702 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
4703 }
4704
4705 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
4706 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
4707 privateData,
4708 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
4709 }
4710
4711 if (metadata->is_tuning_params_valid) {
4712 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
4713 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
4714 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
4715
4716
4717 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
4718 sizeof(uint32_t));
4719 data += sizeof(uint32_t);
4720
4721 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
4722 sizeof(uint32_t));
4723 CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
4724 data += sizeof(uint32_t);
4725
4726 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
4727 sizeof(uint32_t));
4728 CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
4729 data += sizeof(uint32_t);
4730
4731 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
4732 sizeof(uint32_t));
4733 CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
4734 data += sizeof(uint32_t);
4735
4736 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
4737 sizeof(uint32_t));
4738 CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
4739 data += sizeof(uint32_t);
4740
4741 metadata->tuning_params.tuning_mod3_data_size = 0;
4742 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
4743 sizeof(uint32_t));
4744 CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
4745 data += sizeof(uint32_t);
4746
4747 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
4748 TUNING_SENSOR_DATA_MAX);
4749 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
4750 count);
4751 data += count;
4752
4753 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
4754 TUNING_VFE_DATA_MAX);
4755 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
4756 count);
4757 data += count;
4758
4759 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
4760 TUNING_CPP_DATA_MAX);
4761 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
4762 count);
4763 data += count;
4764
4765 count = MIN(metadata->tuning_params.tuning_cac_data_size,
4766 TUNING_CAC_DATA_MAX);
4767 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
4768 count);
4769 data += count;
4770
4771 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
4772 (int32_t *)(void *)tuning_meta_data_blob,
4773 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
4774 }
4775
4776 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
4777 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
4778 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
4779 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
4780 NEUTRAL_COL_POINTS);
4781 }
4782
4783 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
4784 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
4785 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
4786 }
4787
4788 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
4789 int32_t aeRegions[REGIONS_TUPLE_COUNT];
4790 // Adjust crop region from sensor output coordinate system to active
4791 // array coordinate system.
4792 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
4793 hAeRegions->rect.width, hAeRegions->rect.height);
4794
4795 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
4796 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
4797 REGIONS_TUPLE_COUNT);
4798 CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4799 __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
4800 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
4801 hAeRegions->rect.height);
4802 }
4803
4804 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
4805 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
4806 if (NAME_NOT_FOUND != val) {
4807 uint8_t fwkAfMode = (uint8_t)val;
4808 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
4809 CDBG("%s: Metadata : ANDROID_CONTROL_AF_MODE %d", __func__, val);
4810 } else {
4811 CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_AF_MODE %d",
4812 __func__, val);
4813 }
4814 }
4815
4816 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
4817 uint8_t fwk_afState = (uint8_t) *afState;
4818 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
4819 CDBG("%s: Metadata : ANDROID_CONTROL_AF_STATE %u", __func__, *afState);
4820 }
4821
4822 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
4823 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
4824 }
4825
4826 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
4827 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
4828 }
4829
4830 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
4831 uint8_t fwk_lensState = *lensState;
4832 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
4833 }
4834
4835 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
4836 /*af regions*/
4837 int32_t afRegions[REGIONS_TUPLE_COUNT];
4838 // Adjust crop region from sensor output coordinate system to active
4839 // array coordinate system.
4840 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
4841 hAfRegions->rect.width, hAfRegions->rect.height);
4842
4843 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
4844 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
4845 REGIONS_TUPLE_COUNT);
4846 CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4847 __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
4848 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
4849 hAfRegions->rect.height);
4850 }
4851
4852 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
4853 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
4854 *hal_ab_mode);
4855 if (NAME_NOT_FOUND != val) {
4856 uint8_t fwk_ab_mode = (uint8_t)val;
4857 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
4858 }
4859 }
4860
4861 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4862 int val = lookupFwkName(SCENE_MODES_MAP,
4863 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
4864 if (NAME_NOT_FOUND != val) {
4865 uint8_t fwkBestshotMode = (uint8_t)val;
4866 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
4867 CDBG("%s: Metadata : ANDROID_CONTROL_SCENE_MODE", __func__);
4868 } else {
4869 CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_SCENE_MODE", __func__);
4870 }
4871 }
4872
4873 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
4874 uint8_t fwk_mode = (uint8_t) *mode;
4875 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
4876 }
4877
4878 /* Constant metadata values to be update*/
4879 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
4880 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
4881
4882 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4883 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4884
4885 int32_t hotPixelMap[2];
4886 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
4887
4888 // CDS
4889 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
4890 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
4891 }
4892
4893 // TNR
4894 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
4895 uint8_t tnr_enable = tnr->denoise_enable;
4896 int32_t tnr_process_type = (int32_t)tnr->process_plates;
4897
4898 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
4899 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
4900 }
4901
4902 // Reprocess crop data
4903 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
4904 uint8_t cnt = crop_data->num_of_streams;
4905 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
            // mm-qcamera-daemon only posts crop_data for streams
            // not linked to pproc, so the absence of valid crop
            // metadata is not necessarily an error case.
4909 CDBG("%s: No valid crop metadata entries", __func__);
4910 } else {
4911 uint32_t reproc_stream_id;
4912 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
4913 CDBG("%s: No reprocessible stream found, ignore crop data", __func__);
4914 } else {
4915 int rc = NO_ERROR;
4916 Vector<int32_t> roi_map;
4917 int32_t *crop = new int32_t[cnt*4];
4918 if (NULL == crop) {
4919 rc = NO_MEMORY;
4920 }
4921 if (NO_ERROR == rc) {
4922 int32_t streams_found = 0;
4923 for (size_t i = 0; i < cnt; i++) {
4924 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
4925 if (pprocDone) {
4926 // HAL already does internal reprocessing,
4927 // either via reprocessing before JPEG encoding,
4928 // or offline postprocessing for pproc bypass case.
4929 crop[0] = 0;
4930 crop[1] = 0;
4931 crop[2] = mInputStreamInfo.dim.width;
4932 crop[3] = mInputStreamInfo.dim.height;
4933 } else {
4934 crop[0] = crop_data->crop_info[i].crop.left;
4935 crop[1] = crop_data->crop_info[i].crop.top;
4936 crop[2] = crop_data->crop_info[i].crop.width;
4937 crop[3] = crop_data->crop_info[i].crop.height;
4938 }
4939 roi_map.add(crop_data->crop_info[i].roi_map.left);
4940 roi_map.add(crop_data->crop_info[i].roi_map.top);
4941 roi_map.add(crop_data->crop_info[i].roi_map.width);
4942 roi_map.add(crop_data->crop_info[i].roi_map.height);
4943 streams_found++;
4944 CDBG("%s: Adding reprocess crop data for stream %dx%d, %dx%d",
4945 __func__,
4946 crop[0], crop[1], crop[2], crop[3]);
4947 CDBG("%s: Adding reprocess crop roi map for stream %dx%d, %dx%d",
4948 __func__,
4949 crop_data->crop_info[i].roi_map.left,
4950 crop_data->crop_info[i].roi_map.top,
4951 crop_data->crop_info[i].roi_map.width,
4952 crop_data->crop_info[i].roi_map.height);
4953 break;
4954
4955 }
4956 }
4957 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
4958 &streams_found, 1);
4959 camMetadata.update(QCAMERA3_CROP_REPROCESS,
4960 crop, (size_t)(streams_found * 4));
4961 if (roi_map.array()) {
4962 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
4963 roi_map.array(), roi_map.size());
4964 }
4965 }
4966 if (crop) {
4967 delete [] crop;
4968 }
4969 }
4970 }
4971 }
4972
4973 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
4974 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
4975 *cacMode);
4976 if (NAME_NOT_FOUND != val) {
4977 uint8_t fwkCacMode = (uint8_t)val;
4978 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
4979 } else {
4980 ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode);
4981 }
4982 }
4983
4984 // Post blob of cam_cds_data through vendor tag.
4985 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
4986 uint8_t cnt = cdsInfo->num_of_streams;
4987 cam_cds_data_t cdsDataOverride;
4988 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
4989 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
4990 cdsDataOverride.num_of_streams = 1;
4991 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
4992 uint32_t reproc_stream_id;
4993 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
4994 CDBG("%s: No reprocessible stream found, ignore cds data", __func__);
4995 } else {
4996 for (size_t i = 0; i < cnt; i++) {
4997 if (cdsInfo->cds_info[i].stream_id ==
4998 reproc_stream_id) {
4999 cdsDataOverride.cds_info[0].cds_enable =
5000 cdsInfo->cds_info[i].cds_enable;
5001 break;
5002 }
5003 }
5004 }
5005 } else {
5006 CDBG("%s: Invalid stream count %d in CDS_DATA", __func__, cnt);
5007 }
5008 camMetadata.update(QCAMERA3_CDS_INFO,
5009 (uint8_t *)&cdsDataOverride,
5010 sizeof(cam_cds_data_t));
5011 }
5012
5013 // Ldaf calibration data
5014 if (!mLdafCalibExist) {
5015 IF_META_AVAILABLE(uint32_t, ldafCalib,
5016 CAM_INTF_META_LDAF_EXIF, metadata) {
5017 mLdafCalibExist = true;
5018 mLdafCalib[0] = ldafCalib[0];
5019 mLdafCalib[1] = ldafCalib[1];
5020 CDBG("%s: ldafCalib[0] is %d, ldafCalib[1] is %d", __func__,
5021 ldafCalib[0], ldafCalib[1]);
5022 }
5023 }
5024
5025 // Post Raw Sensitivity Boost = ISP digital gain
5026 IF_META_AVAILABLE(float, ispDigitalGain, CAM_INTF_META_ISP_DIGITAL_GAIN, metadata) {
5027 int32_t postRawSensitivity = static_cast<int32_t>(*ispDigitalGain * 100);
5028 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &postRawSensitivity, 1);
5029 }
5030
5031 /* In batch mode, cache the first metadata in the batch */
5032 if (mBatchSize && firstMetadataInBatch) {
5033 mCachedMetadata.clear();
5034 mCachedMetadata = camMetadata;
5035 }
5036
5037 resultMetadata = camMetadata.release();
5038 return resultMetadata;
5039 }
5040
5041 /*===========================================================================
5042 * FUNCTION : saveExifParams
5043 *
5044 * DESCRIPTION:
5045 *
5046 * PARAMETERS :
5047 * @metadata : metadata information from callback
5048 *
5049 * RETURN : none
5050 *
5051 *==========================================================================*/
saveExifParams(metadata_buffer_t * metadata)5052 void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
5053 {
5054 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
5055 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
5056 mExifParams.ae_debug_params = *ae_exif_debug_params;
5057 mExifParams.ae_debug_params_valid = TRUE;
5058 }
5059 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
5060 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
5061 mExifParams.awb_debug_params = *awb_exif_debug_params;
5062 mExifParams.awb_debug_params_valid = TRUE;
5063 }
5064 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
5065 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
5066 mExifParams.af_debug_params = *af_exif_debug_params;
5067 mExifParams.af_debug_params_valid = TRUE;
5068 }
5069 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
5070 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
5071 mExifParams.asd_debug_params = *asd_exif_debug_params;
5072 mExifParams.asd_debug_params_valid = TRUE;
5073 }
5074 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
5075 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
5076 mExifParams.stats_debug_params = *stats_exif_debug_params;
5077 mExifParams.stats_debug_params_valid = TRUE;
5078 }
5079 }
5080
5081 /*===========================================================================
5082 * FUNCTION : get3AExifParams
5083 *
5084 * DESCRIPTION:
5085 *
5086 * PARAMETERS : none
5087 *
5088 *
5089 * RETURN : mm_jpeg_exif_params_t
5090 *
5091 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Return (by value) the 3A EXIF debug parameters most recently cached by
    // saveExifParams(). Sections whose *_valid flag is unset hold stale or
    // zero-initialized data and must be ignored by the caller.
    return mExifParams;
}
5096
5097 /*===========================================================================
5098 * FUNCTION : translateCbUrgentMetadataToResultMetadata
5099 *
 * DESCRIPTION: Translate the partial (urgent) HAL metadata — 3A states,
 *              triggers and AE/AWB modes — into framework result metadata
5101 *
5102 * PARAMETERS :
5103 * @metadata : metadata information from callback
5104 *
5105 * RETURN : camera_metadata_t*
5106 * metadata in a format specified by fwk
5107 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    // Convert the "urgent" (partial) HAL metadata — the 3A states, triggers
    // and modes the framework wants as early as possible — into a framework
    // camera_metadata_t. Caller takes ownership of the returned buffer
    // (camMetadata.release() transfers it).
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state: HAL enum value is forwarded as-is, narrowed to uint8_t.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", __func__, *whiteBalanceState);
    }

    // AE precapture trigger and its id are echoed back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        CDBG("%s: urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                __func__, aecTrigger->trigger);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d", __func__,
                aecTrigger->trigger_id);
    }

    // AE state, narrowed to the framework's uint8_t representation.
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE %u", __func__, *ae_state);
    }

    // AF trigger and its id are echoed back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        CDBG("%s: urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                __func__, af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d", __func__,
                af_trigger->trigger_id);
    }

    // AWB mode: map the HAL white balance enum to the framework enum;
    // skip the tag entirely if there is no mapping.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", __func__, val);
        } else {
            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AWB_MODE", __func__);
        }
    }

    // Deduce ANDROID_CONTROL_AE_MODE from three independent HAL fields.
    // Precedence (checked in this order):
    //   1. red-eye reduction enabled  -> ON_AUTO_FLASH_REDEYE
    //   2. flash mode AUTO/ON         -> mapped via AE_FLASH_MODE_MAP
    //   3. plain AE on/off            -> ON / OFF
    // Sentinel initial values (CAM_*_MAX, -1) mean "not present in metadata".
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            ALOGE("%s: Unsupported flash mode %d", __func__, flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the three fields was usable; AE_MODE is left unset.
        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                __func__, redeye, flashMode, aeMode);
    }

    resultMetadata = camMetadata.release();
    return resultMetadata;
}
5202
5203 /*===========================================================================
5204 * FUNCTION : dumpMetadataToFile
5205 *
5206 * DESCRIPTION: Dumps tuning metadata to file system
5207 *
5208 * PARAMETERS :
5209 * @meta : tuning metadata
5210 * @dumpFrameCount : current dump frame count
5211 * @enabled : Enable mask
5212 *
5213 *==========================================================================*/
dumpMetadataToFile(tuning_params_t & meta,uint32_t & dumpFrameCount,bool enabled,const char * type,uint32_t frameNumber)5214 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
5215 uint32_t &dumpFrameCount,
5216 bool enabled,
5217 const char *type,
5218 uint32_t frameNumber)
5219 {
5220 uint32_t frm_num = 0;
5221
5222 //Some sanity checks
5223 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
5224 ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
5225 __func__,
5226 meta.tuning_sensor_data_size,
5227 TUNING_SENSOR_DATA_MAX);
5228 return;
5229 }
5230
5231 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
5232 ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
5233 __func__,
5234 meta.tuning_vfe_data_size,
5235 TUNING_VFE_DATA_MAX);
5236 return;
5237 }
5238
5239 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
5240 ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
5241 __func__,
5242 meta.tuning_cpp_data_size,
5243 TUNING_CPP_DATA_MAX);
5244 return;
5245 }
5246
5247 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
5248 ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
5249 __func__,
5250 meta.tuning_cac_data_size,
5251 TUNING_CAC_DATA_MAX);
5252 return;
5253 }
5254 //
5255
5256 if(enabled){
5257 char timeBuf[FILENAME_MAX];
5258 char buf[FILENAME_MAX];
5259 memset(buf, 0, sizeof(buf));
5260 memset(timeBuf, 0, sizeof(timeBuf));
5261 time_t current_time;
5262 struct tm * timeinfo;
5263 time (¤t_time);
5264 timeinfo = localtime (¤t_time);
5265 if (timeinfo != NULL) {
5266 strftime (timeBuf, sizeof(timeBuf),
5267 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
5268 }
5269 String8 filePath(timeBuf);
5270 snprintf(buf,
5271 sizeof(buf),
5272 "%dm_%s_%d.bin",
5273 dumpFrameCount,
5274 type,
5275 frameNumber);
5276 filePath.append(buf);
5277 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
5278 if (file_fd >= 0) {
5279 ssize_t written_len = 0;
5280 meta.tuning_data_version = TUNING_DATA_VERSION;
5281 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
5282 written_len += write(file_fd, data, sizeof(uint32_t));
5283 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
5284 CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
5285 written_len += write(file_fd, data, sizeof(uint32_t));
5286 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
5287 CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
5288 written_len += write(file_fd, data, sizeof(uint32_t));
5289 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
5290 CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
5291 written_len += write(file_fd, data, sizeof(uint32_t));
5292 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
5293 CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
5294 written_len += write(file_fd, data, sizeof(uint32_t));
5295 meta.tuning_mod3_data_size = 0;
5296 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
5297 CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
5298 written_len += write(file_fd, data, sizeof(uint32_t));
5299 size_t total_size = meta.tuning_sensor_data_size;
5300 data = (void *)((uint8_t *)&meta.data);
5301 written_len += write(file_fd, data, total_size);
5302 total_size = meta.tuning_vfe_data_size;
5303 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
5304 written_len += write(file_fd, data, total_size);
5305 total_size = meta.tuning_cpp_data_size;
5306 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
5307 written_len += write(file_fd, data, total_size);
5308 total_size = meta.tuning_cac_data_size;
5309 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
5310 written_len += write(file_fd, data, total_size);
5311 close(file_fd);
5312 }else {
5313 ALOGE("%s: fail to open file for metadata dumping", __func__);
5314 }
5315 }
5316 }
5317
5318 /*===========================================================================
5319 * FUNCTION : cleanAndSortStreamInfo
5320 *
5321 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
5322 * and sort them such that raw stream is at the end of the list
5323 * This is a workaround for camera daemon constraint.
5324 *
5325 * PARAMETERS : None
5326 *
5327 *==========================================================================*/
cleanAndSortStreamInfo()5328 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
5329 {
5330 List<stream_info_t *> newStreamInfo;
5331
5332 /*clean up invalid streams*/
5333 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
5334 it != mStreamInfo.end();) {
5335 if(((*it)->status) == INVALID){
5336 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
5337 delete channel;
5338 free(*it);
5339 it = mStreamInfo.erase(it);
5340 } else {
5341 it++;
5342 }
5343 }
5344
5345 // Move preview/video/callback/snapshot streams into newList
5346 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5347 it != mStreamInfo.end();) {
5348 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
5349 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
5350 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
5351 newStreamInfo.push_back(*it);
5352 it = mStreamInfo.erase(it);
5353 } else
5354 it++;
5355 }
5356 // Move raw streams into newList
5357 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5358 it != mStreamInfo.end();) {
5359 newStreamInfo.push_back(*it);
5360 it = mStreamInfo.erase(it);
5361 }
5362
5363 mStreamInfo = newStreamInfo;
5364 }
5365
5366 /*===========================================================================
5367 * FUNCTION : extractJpegMetadata
5368 *
5369 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
5370 * JPEG metadata is cached in HAL, and return as part of capture
5371 * result when metadata is returned from camera daemon.
5372 *
5373 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
5374 * @request: capture request
5375 *
5376 *==========================================================================*/
extractJpegMetadata(CameraMetadata & jpegMetadata,const camera3_capture_request_t * request)5377 void QCamera3HardwareInterface::extractJpegMetadata(
5378 CameraMetadata& jpegMetadata,
5379 const camera3_capture_request_t *request)
5380 {
5381 CameraMetadata frame_settings;
5382 frame_settings = request->settings;
5383
5384 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
5385 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
5386 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
5387 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
5388
5389 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
5390 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
5391 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
5392 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
5393
5394 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
5395 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
5396 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
5397 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
5398
5399 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
5400 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
5401 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
5402 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
5403
5404 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
5405 jpegMetadata.update(ANDROID_JPEG_QUALITY,
5406 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
5407 frame_settings.find(ANDROID_JPEG_QUALITY).count);
5408
5409 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
5410 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
5411 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
5412 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
5413
5414 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
5415 int32_t thumbnail_size[2];
5416 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
5417 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
5418 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
5419 int32_t orientation =
5420 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
5421 if ((orientation == 90) || (orientation == 270)) {
5422 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
5423 int32_t temp;
5424 temp = thumbnail_size[0];
5425 thumbnail_size[0] = thumbnail_size[1];
5426 thumbnail_size[1] = temp;
5427 }
5428 }
5429 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
5430 thumbnail_size,
5431 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
5432 }
5433
5434 }
5435
5436 /*===========================================================================
5437 * FUNCTION : convertToRegions
5438 *
5439 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
5440 *
5441 * PARAMETERS :
5442 * @rect : cam_rect_t struct to convert
5443 * @region : int32_t destination array
5444 * @weight : if we are converting from cam_area_t, weight is valid
5445 * else weight = -1
5446 *
5447 *==========================================================================*/
convertToRegions(cam_rect_t rect,int32_t * region,int weight)5448 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
5449 int32_t *region, int weight)
5450 {
5451 region[0] = rect.left;
5452 region[1] = rect.top;
5453 region[2] = rect.left + rect.width;
5454 region[3] = rect.top + rect.height;
5455 if (weight > -1) {
5456 region[4] = weight;
5457 }
5458 }
5459
5460 /*===========================================================================
5461 * FUNCTION : convertFromRegions
5462 *
5463 * DESCRIPTION: helper method to convert from array to cam_rect_t
5464 *
 * PARAMETERS :
 *   @roi      : cam_area_t struct to populate from the settings
 *   @settings : capture request settings containing the region array
 *   @tag      : metadata tag whose [xmin, ymin, xmax, ymax, weight]
 *               int32 array is read
5470 *
5471 *==========================================================================*/
convertFromRegions(cam_area_t & roi,const camera_metadata_t * settings,uint32_t tag)5472 void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
5473 const camera_metadata_t *settings, uint32_t tag)
5474 {
5475 CameraMetadata frame_settings;
5476 frame_settings = settings;
5477 int32_t x_min = frame_settings.find(tag).data.i32[0];
5478 int32_t y_min = frame_settings.find(tag).data.i32[1];
5479 int32_t x_max = frame_settings.find(tag).data.i32[2];
5480 int32_t y_max = frame_settings.find(tag).data.i32[3];
5481 roi.weight = frame_settings.find(tag).data.i32[4];
5482 roi.rect.left = x_min;
5483 roi.rect.top = y_min;
5484 roi.rect.width = x_max - x_min;
5485 roi.rect.height = y_max - y_min;
5486 }
5487
5488 /*===========================================================================
5489 * FUNCTION : resetIfNeededROI
5490 *
5491 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
5492 * crop region
5493 *
5494 * PARAMETERS :
5495 * @roi : cam_area_t struct to resize
5496 * @scalerCropRegion : cam_crop_region_t region to compare against
5497 *
5498 *
5499 *==========================================================================*/
resetIfNeededROI(cam_area_t * roi,const cam_crop_region_t * scalerCropRegion)5500 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
5501 const cam_crop_region_t* scalerCropRegion)
5502 {
5503 int32_t roi_x_max = roi->rect.width + roi->rect.left;
5504 int32_t roi_y_max = roi->rect.height + roi->rect.top;
5505 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
5506 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
5507
5508 /* According to spec weight = 0 is used to indicate roi needs to be disabled
5509 * without having this check the calculations below to validate if the roi
5510 * is inside scalar crop region will fail resulting in the roi not being
5511 * reset causing algorithm to continue to use stale roi window
5512 */
5513 if (roi->weight == 0) {
5514 return true;
5515 }
5516
5517 if ((roi_x_max < scalerCropRegion->left) ||
5518 // right edge of roi window is left of scalar crop's left edge
5519 (roi_y_max < scalerCropRegion->top) ||
5520 // bottom edge of roi window is above scalar crop's top edge
5521 (roi->rect.left > crop_x_max) ||
5522 // left edge of roi window is beyond(right) of scalar crop's right edge
5523 (roi->rect.top > crop_y_max)){
5524 // top edge of roi windo is above scalar crop's top edge
5525 return false;
5526 }
5527 if (roi->rect.left < scalerCropRegion->left) {
5528 roi->rect.left = scalerCropRegion->left;
5529 }
5530 if (roi->rect.top < scalerCropRegion->top) {
5531 roi->rect.top = scalerCropRegion->top;
5532 }
5533 if (roi_x_max > crop_x_max) {
5534 roi_x_max = crop_x_max;
5535 }
5536 if (roi_y_max > crop_y_max) {
5537 roi_y_max = crop_y_max;
5538 }
5539 roi->rect.width = roi_x_max - roi->rect.left;
5540 roi->rect.height = roi_y_max - roi->rect.top;
5541 return true;
5542 }
5543
5544 /*===========================================================================
5545 * FUNCTION : convertLandmarks
5546 *
5547 * DESCRIPTION: helper method to extract the landmarks from face detection info
5548 *
5549 * PARAMETERS :
5550 * @face : cam_rect_t struct to convert
5551 * @landmarks : int32_t destination array
5552 *
5553 *
5554 *==========================================================================*/
convertLandmarks(cam_face_detection_info_t face,int32_t * landmarks)5555 void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t *landmarks)
5556 {
5557 landmarks[0] = (int32_t)face.left_eye_center.x;
5558 landmarks[1] = (int32_t)face.left_eye_center.y;
5559 landmarks[2] = (int32_t)face.right_eye_center.x;
5560 landmarks[3] = (int32_t)face.right_eye_center.y;
5561 landmarks[4] = (int32_t)face.mouth_center.x;
5562 landmarks[5] = (int32_t)face.mouth_center.y;
5563 }
5564
5565 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
5566 /*===========================================================================
5567 * FUNCTION : initCapabilities
5568 *
5569 * DESCRIPTION: initialize camera capabilities in static data struct
5570 *
5571 * PARAMETERS :
5572 * @cameraId : camera Id
5573 *
5574 * RETURN : int32_t type of status
5575 * NO_ERROR -- success
5576 * none-zero failure code
5577 *==========================================================================*/
initCapabilities(uint32_t cameraId)5578 int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
5579 {
5580 int rc = 0;
5581 mm_camera_vtbl_t *cameraHandle = NULL;
5582 QCamera3HeapMemory *capabilityHeap = NULL;
5583
5584 rc = camera_open((uint8_t)cameraId, &cameraHandle);
5585 if (rc || !cameraHandle) {
5586 ALOGE("%s: camera_open failed. rc = %d, cameraHandle = %p", __func__, rc, cameraHandle);
5587 goto open_failed;
5588 }
5589
5590 capabilityHeap = new QCamera3HeapMemory(1);
5591 if (capabilityHeap == NULL) {
5592 ALOGE("%s: creation of capabilityHeap failed", __func__);
5593 goto heap_creation_failed;
5594 }
5595 /* Allocate memory for capability buffer */
5596 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
5597 if(rc != OK) {
5598 ALOGE("%s: No memory for cappability", __func__);
5599 goto allocate_failed;
5600 }
5601
5602 /* Map memory for capability buffer */
5603 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
5604 rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
5605 CAM_MAPPING_BUF_TYPE_CAPABILITY,
5606 capabilityHeap->getFd(0),
5607 sizeof(cam_capability_t));
5608 if(rc < 0) {
5609 ALOGE("%s: failed to map capability buffer", __func__);
5610 goto map_failed;
5611 }
5612
5613 /* Query Capability */
5614 rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
5615 if(rc < 0) {
5616 ALOGE("%s: failed to query capability",__func__);
5617 goto query_failed;
5618 }
5619 gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
5620 if (!gCamCapability[cameraId]) {
5621 ALOGE("%s: out of memory", __func__);
5622 goto query_failed;
5623 }
5624 memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
5625 sizeof(cam_capability_t));
5626 rc = 0;
5627
5628 query_failed:
5629 cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
5630 CAM_MAPPING_BUF_TYPE_CAPABILITY);
5631 map_failed:
5632 capabilityHeap->deallocate();
5633 allocate_failed:
5634 delete capabilityHeap;
5635 heap_creation_failed:
5636 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
5637 cameraHandle = NULL;
5638 open_failed:
5639 return rc;
5640 }
5641
5642 /*==========================================================================
5643 * FUNCTION : get3Aversion
5644 *
5645 * DESCRIPTION: get the Q3A S/W version
5646 *
5647 * PARAMETERS :
5648 * @sw_version: Reference of Q3A structure which will hold version info upon
5649 * return
5650 *
5651 * RETURN : None
5652 *
5653 *==========================================================================*/
get3AVersion(cam_q3a_version_t & sw_version)5654 void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
5655 {
5656 if(gCamCapability[mCameraId])
5657 sw_version = gCamCapability[mCameraId]->q3a_version;
5658 else
5659 ALOGE("%s:Capability structure NULL!", __func__);
5660 }
5661
5662
5663 /*===========================================================================
5664 * FUNCTION : initParameters
5665 *
5666 * DESCRIPTION: initialize camera parameters
5667 *
5668 * PARAMETERS :
5669 *
5670 * RETURN : int32_t type of status
5671 * NO_ERROR -- success
5672 * none-zero failure code
5673 *==========================================================================*/
initParameters()5674 int QCamera3HardwareInterface::initParameters()
5675 {
5676 int rc = 0;
5677
5678 //Allocate Set Param Buffer
5679 mParamHeap = new QCamera3HeapMemory(1);
5680 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
5681 if(rc != OK) {
5682 rc = NO_MEMORY;
5683 ALOGE("Failed to allocate SETPARM Heap memory");
5684 delete mParamHeap;
5685 mParamHeap = NULL;
5686 return rc;
5687 }
5688
5689 //Map memory for parameters buffer
5690 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
5691 CAM_MAPPING_BUF_TYPE_PARM_BUF,
5692 mParamHeap->getFd(0),
5693 sizeof(metadata_buffer_t));
5694 if(rc < 0) {
5695 ALOGE("%s:failed to map SETPARM buffer",__func__);
5696 rc = FAILED_TRANSACTION;
5697 mParamHeap->deallocate();
5698 delete mParamHeap;
5699 mParamHeap = NULL;
5700 return rc;
5701 }
5702
5703 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
5704
5705 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
5706 return rc;
5707 }
5708
5709 /*===========================================================================
5710 * FUNCTION : deinitParameters
5711 *
5712 * DESCRIPTION: de-initialize camera parameters
5713 *
5714 * PARAMETERS :
5715 *
5716 * RETURN : NONE
5717 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Tear down in the reverse order of initParameters(): first unmap the
    // parameter buffer from the camera backend, then release the backing
    // heap memory.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's storage (see DATA_PTR in
    // initParameters); only the pointer needs clearing here.
    mParameters = NULL;

    // mPrevParameters was a plain malloc'd buffer.
    free(mPrevParameters);
    mPrevParameters = NULL;
}
5732
5733 /*===========================================================================
5734 * FUNCTION : calcMaxJpegSize
5735 *
5736 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
5737 *
5738 * PARAMETERS :
5739 *
5740 * RETURN : max_jpeg_size
5741 *==========================================================================*/
calcMaxJpegSize(uint32_t camera_id)5742 size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
5743 {
5744 size_t max_jpeg_size = 0;
5745 size_t temp_width, temp_height;
5746 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
5747 MAX_SIZES_CNT);
5748 for (size_t i = 0; i < count; i++) {
5749 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
5750 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
5751 if (temp_width * temp_height > max_jpeg_size ) {
5752 max_jpeg_size = temp_width * temp_height;
5753 }
5754 }
5755 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
5756 return max_jpeg_size;
5757 }
5758
5759 /*===========================================================================
5760 * FUNCTION : getMaxRawSize
5761 *
5762 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
5763 *
5764 * PARAMETERS :
5765 *
5766 * RETURN : Largest supported Raw Dimension
5767 *==========================================================================*/
getMaxRawSize(uint32_t camera_id)5768 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
5769 {
5770 int max_width = 0;
5771 cam_dimension_t maxRawSize;
5772
5773 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
5774 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
5775 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
5776 max_width = gCamCapability[camera_id]->raw_dim[i].width;
5777 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
5778 }
5779 }
5780 return maxRawSize;
5781 }
5782
5783
5784 /*===========================================================================
5785 * FUNCTION : calcMaxJpegDim
5786 *
5787 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
5788 *
5789 * PARAMETERS :
5790 *
5791 * RETURN : max_jpeg_dim
5792 *==========================================================================*/
calcMaxJpegDim()5793 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
5794 {
5795 cam_dimension_t max_jpeg_dim;
5796 cam_dimension_t curr_jpeg_dim;
5797 max_jpeg_dim.width = 0;
5798 max_jpeg_dim.height = 0;
5799 curr_jpeg_dim.width = 0;
5800 curr_jpeg_dim.height = 0;
5801 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
5802 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
5803 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
5804 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
5805 max_jpeg_dim.width * max_jpeg_dim.height ) {
5806 max_jpeg_dim.width = curr_jpeg_dim.width;
5807 max_jpeg_dim.height = curr_jpeg_dim.height;
5808 }
5809 }
5810 return max_jpeg_dim;
5811 }
5812
5813 /*===========================================================================
5814 * FUNCTION : addStreamConfig
5815 *
5816 * DESCRIPTION: adds the stream configuration to the array
5817 *
5818 * PARAMETERS :
5819 * @available_stream_configs : pointer to stream configuration array
5820 * @scalar_format : scalar format
5821 * @dim : configuration dimension
5822 * @config_type : input or output configuration type
5823 *
5824 * RETURN : NONE
5825 *==========================================================================*/
addStreamConfig(Vector<int32_t> & available_stream_configs,int32_t scalar_format,const cam_dimension_t & dim,int32_t config_type)5826 void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
5827 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
5828 {
5829 available_stream_configs.add(scalar_format);
5830 available_stream_configs.add(dim.width);
5831 available_stream_configs.add(dim.height);
5832 available_stream_configs.add(config_type);
5833 }
5834
5835
5836 /*===========================================================================
5837 * FUNCTION : initStaticMetadata
5838 *
5839 * DESCRIPTION: initialize the static metadata
5840 *
5841 * PARAMETERS :
5842 * @cameraId : camera Id
5843 *
5844 * RETURN : int32_t type of status
5845 * 0 -- success
5846 * non-zero failure code
5847 *==========================================================================*/
initStaticMetadata(uint32_t cameraId)5848 int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
5849 {
5850 int rc = 0;
5851 CameraMetadata staticInfo;
5852 size_t count = 0;
5853 bool limitedDevice = false;
5854 char prop[PROPERTY_VALUE_MAX];
5855
5856 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
5857 * guaranteed, its advertised as limited device */
5858 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
5859 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type);
5860
5861 uint8_t supportedHwLvl = limitedDevice ?
5862 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
5863 // No capability check done here to distinguish LEVEL_FULL from
5864 // LEVEL_3 - assuming this HAL will not run on devices that only
5865 // meet FULL spec
5866 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
5867
5868 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
5869 &supportedHwLvl, 1);
5870
5871 bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
5872 /*HAL 3 only*/
5873 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
5874 &gCamCapability[cameraId]->min_focus_distance, 1);
5875
5876 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
5877 &gCamCapability[cameraId]->hyper_focal_distance, 1);
5878
5879 /*should be using focal lengths but sensor doesn't provide that info now*/
5880 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
5881 &gCamCapability[cameraId]->focal_length,
5882 1);
5883
5884 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
5885 gCamCapability[cameraId]->apertures,
5886 gCamCapability[cameraId]->apertures_count);
5887
5888 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
5889 gCamCapability[cameraId]->filter_densities,
5890 gCamCapability[cameraId]->filter_densities_count);
5891
5892
5893 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
5894 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
5895 gCamCapability[cameraId]->optical_stab_modes_count);
5896
5897 int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
5898 gCamCapability[cameraId]->lens_shading_map_size.height};
5899 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
5900 lens_shading_map_size,
5901 sizeof(lens_shading_map_size)/sizeof(int32_t));
5902
5903 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
5904 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
5905
5906 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
5907 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
5908
5909 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
5910 &gCamCapability[cameraId]->max_frame_duration, 1);
5911
5912 camera_metadata_rational baseGainFactor = {
5913 gCamCapability[cameraId]->base_gain_factor.numerator,
5914 gCamCapability[cameraId]->base_gain_factor.denominator};
5915 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
5916 &baseGainFactor, 1);
5917
5918 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
5919 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
5920
5921 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
5922 gCamCapability[cameraId]->pixel_array_size.height};
5923 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
5924 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
5925
5926 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
5927 gCamCapability[cameraId]->active_array_size.top,
5928 gCamCapability[cameraId]->active_array_size.width,
5929 gCamCapability[cameraId]->active_array_size.height};
5930 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
5931 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
5932
5933 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
5934 &gCamCapability[cameraId]->white_level, 1);
5935
5936 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
5937 gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
5938
5939 bool hasBlackRegions = false;
5940 if (gCamCapability[cameraId]->optical_black_region_count != 0 &&
5941 gCamCapability[cameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
5942 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
5943 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i+=4) {
5944 // Left
5945 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
5946 //Top
5947 opticalBlackRegions[i + 1] = gCamCapability[cameraId]->optical_black_regions[i + 1];
5948 // Width
5949 opticalBlackRegions[i + 2] = gCamCapability[cameraId]->optical_black_regions[i + 2] -
5950 gCamCapability[cameraId]->optical_black_regions[i];
5951 // Height
5952 opticalBlackRegions[i + 3] = gCamCapability[cameraId]->optical_black_regions[i + 3] -
5953 gCamCapability[cameraId]->optical_black_regions[i + 1];
5954 }
5955 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
5956 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
5957 hasBlackRegions = true;
5958 }
5959
5960 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
5961 &gCamCapability[cameraId]->flash_charge_duration, 1);
5962
5963 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
5964 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
5965
5966 uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
5967 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
5968 ×tampSource, 1);
5969
5970 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
5971 &gCamCapability[cameraId]->histogram_size, 1);
5972
5973 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
5974 &gCamCapability[cameraId]->max_histogram_count, 1);
5975
5976 int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
5977 gCamCapability[cameraId]->sharpness_map_size.height};
5978
5979 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
5980 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
5981
5982 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
5983 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
5984
5985 int32_t scalar_formats[] = {
5986 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
5987 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
5988 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
5989 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
5990 HAL_PIXEL_FORMAT_RAW10,
5991 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
5992 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
5993 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
5994 scalar_formats,
5995 scalar_formats_count);
5996
5997 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
5998 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
5999 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
6000 count, MAX_SIZES_CNT, available_processed_sizes);
6001 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
6002 available_processed_sizes, count * 2);
6003
6004 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
6005 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
6006 makeTable(gCamCapability[cameraId]->raw_dim,
6007 count, MAX_SIZES_CNT, available_raw_sizes);
6008 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
6009 available_raw_sizes, count * 2);
6010
6011 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
6012 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
6013 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
6014 count, MAX_SIZES_CNT, available_fps_ranges);
6015 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6016 available_fps_ranges, count * 2);
6017
6018 camera_metadata_rational exposureCompensationStep = {
6019 gCamCapability[cameraId]->exp_compensation_step.numerator,
6020 gCamCapability[cameraId]->exp_compensation_step.denominator};
6021 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
6022 &exposureCompensationStep, 1);
6023
6024 Vector<uint8_t> availableVstabModes;
6025 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
6026 char eis_prop[PROPERTY_VALUE_MAX];
6027 memset(eis_prop, 0, sizeof(eis_prop));
6028 property_get("persist.camera.eis.enable", eis_prop, "0");
6029 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
6030 if (facingBack && eis_prop_set) {
6031 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
6032 }
6033 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6034 availableVstabModes.array(), availableVstabModes.size());
6035
6036 /*HAL 1 and HAL 3 common*/
6037 float maxZoom = 4;
6038 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6039 &maxZoom, 1);
6040
6041 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
6042 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
6043
6044 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
6045 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
6046 max3aRegions[2] = 0; /* AF not supported */
6047 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
6048 max3aRegions, 3);
6049
6050 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
6051 memset(prop, 0, sizeof(prop));
6052 property_get("persist.camera.facedetect", prop, "1");
6053 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
6054 CDBG("%s: Support face detection mode: %d",
6055 __func__, supportedFaceDetectMode);
6056
6057 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
6058 Vector<uint8_t> availableFaceDetectModes;
6059 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
6060 if (supportedFaceDetectMode == 1) {
6061 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6062 } else if (supportedFaceDetectMode == 2) {
6063 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6064 } else if (supportedFaceDetectMode == 3) {
6065 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6066 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6067 } else {
6068 maxFaces = 0;
6069 }
6070 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6071 availableFaceDetectModes.array(),
6072 availableFaceDetectModes.size());
6073 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
6074 (int32_t *)&maxFaces, 1);
6075
6076 int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
6077 gCamCapability[cameraId]->exposure_compensation_max};
6078 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
6079 exposureCompensationRange,
6080 sizeof(exposureCompensationRange)/sizeof(int32_t));
6081
6082 uint8_t lensFacing = (facingBack) ?
6083 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
6084 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
6085
6086 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6087 available_thumbnail_sizes,
6088 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
6089
6090 /*all sizes will be clubbed into this tag*/
6091 int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
6092 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6093 size_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes,
6094 count * 2, MAX_SIZES_CNT * 2, gCamCapability[cameraId]->active_array_size,
6095 gCamCapability[cameraId]->max_downscale_factor);
6096 /*android.scaler.availableStreamConfigurations*/
6097 size_t max_stream_configs_size = count * scalar_formats_count * 4;
6098 Vector<int32_t> available_stream_configs;
6099 cam_dimension_t active_array_dim;
6100 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
6101 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
    /* Add input/output stream configurations for each scalar format */
6103 for (size_t j = 0; j < scalar_formats_count; j++) {
6104 switch (scalar_formats[j]) {
6105 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
6106 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6107 case HAL_PIXEL_FORMAT_RAW10:
6108 for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
6109 addStreamConfig(available_stream_configs, scalar_formats[j],
6110 gCamCapability[cameraId]->raw_dim[i],
6111 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6112 }
6113 break;
6114 case HAL_PIXEL_FORMAT_BLOB:
6115 cam_dimension_t jpeg_size;
6116 for (size_t i = 0; i < jpeg_sizes_cnt/2; i++) {
6117 jpeg_size.width = available_jpeg_sizes[i*2];
6118 jpeg_size.height = available_jpeg_sizes[i*2+1];
6119 addStreamConfig(available_stream_configs, scalar_formats[j],
6120 jpeg_size,
6121 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6122 }
6123 break;
6124 case HAL_PIXEL_FORMAT_YCbCr_420_888:
6125 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
6126 default:
6127 cam_dimension_t largest_picture_size;
6128 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
6129 for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
6130 addStreamConfig(available_stream_configs, scalar_formats[j],
6131 gCamCapability[cameraId]->picture_sizes_tbl[i],
6132 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6133 /* Book keep largest */
6134 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
6135 >= largest_picture_size.width &&
6136 gCamCapability[cameraId]->picture_sizes_tbl[i].height
6137 >= largest_picture_size.height)
6138 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
6139 }
            /* For the below 2 formats we also support input streams for
               reprocessing; advertise those as well */
6141 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
6142 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
6143 addStreamConfig(available_stream_configs, scalar_formats[j],
6144 largest_picture_size,
6145 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
6146 }
6147 break;
6148 }
6149 }
6150
6151 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
6152 available_stream_configs.array(), available_stream_configs.size());
6153 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
6154 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
6155
6156 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6157 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6158
6159 /* android.scaler.availableMinFrameDurations */
6160 int64_t available_min_durations[max_stream_configs_size];
6161 size_t idx = 0;
6162 for (size_t j = 0; j < scalar_formats_count; j++) {
6163 switch (scalar_formats[j]) {
6164 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
6165 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6166 case HAL_PIXEL_FORMAT_RAW10:
6167 for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
6168 available_min_durations[idx] = scalar_formats[j];
6169 available_min_durations[idx+1] =
6170 gCamCapability[cameraId]->raw_dim[i].width;
6171 available_min_durations[idx+2] =
6172 gCamCapability[cameraId]->raw_dim[i].height;
6173 available_min_durations[idx+3] =
6174 gCamCapability[cameraId]->raw_min_duration[i];
6175 idx+=4;
6176 }
6177 break;
6178 default:
6179 for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
6180 available_min_durations[idx] = scalar_formats[j];
6181 available_min_durations[idx+1] =
6182 gCamCapability[cameraId]->picture_sizes_tbl[i].width;
6183 available_min_durations[idx+2] =
6184 gCamCapability[cameraId]->picture_sizes_tbl[i].height;
6185 available_min_durations[idx+3] =
6186 gCamCapability[cameraId]->picture_min_duration[i];
6187 idx+=4;
6188 }
6189 break;
6190 }
6191 }
6192 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
6193 &available_min_durations[0], idx);
6194
6195 Vector<int32_t> available_hfr_configs;
6196 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
6197 int32_t fps = 0;
6198 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
6199 case CAM_HFR_MODE_60FPS:
6200 fps = 60;
6201 break;
6202 case CAM_HFR_MODE_90FPS:
6203 fps = 90;
6204 break;
6205 case CAM_HFR_MODE_120FPS:
6206 fps = 120;
6207 break;
6208 case CAM_HFR_MODE_150FPS:
6209 fps = 150;
6210 break;
6211 case CAM_HFR_MODE_180FPS:
6212 fps = 180;
6213 break;
6214 case CAM_HFR_MODE_210FPS:
6215 fps = 210;
6216 break;
6217 case CAM_HFR_MODE_240FPS:
6218 fps = 240;
6219 break;
6220 case CAM_HFR_MODE_480FPS:
6221 fps = 480;
6222 break;
6223 case CAM_HFR_MODE_OFF:
6224 case CAM_HFR_MODE_MAX:
6225 default:
6226 break;
6227 }
6228
6229 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
6230 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
6231 /* For each HFR frame rate, need to advertise one variable fps range
6232 * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
6233 * [120, 120]. While camcorder preview alone is running [30, 120] is
6234 * set by the app. When video recording is started, [120, 120] is
6235 * set. This way sensor configuration does not change when recording
6236 * is started */
6237
6238 /* (width, height, fps_min, fps_max, batch_size_max) */
6239 available_hfr_configs.add(
6240 gCamCapability[cameraId]->hfr_tbl[i].dim.width);
6241 available_hfr_configs.add(
6242 gCamCapability[cameraId]->hfr_tbl[i].dim.height);
6243 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
6244 available_hfr_configs.add(fps);
6245 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6246
6247 /* (width, height, fps_min, fps_max, batch_size_max) */
6248 available_hfr_configs.add(
6249 gCamCapability[cameraId]->hfr_tbl[i].dim.width);
6250 available_hfr_configs.add(
6251 gCamCapability[cameraId]->hfr_tbl[i].dim.height);
6252 available_hfr_configs.add(fps);
6253 available_hfr_configs.add(fps);
6254 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6255 }
6256 }
6257 //Advertise HFR capability only if the property is set
6258 memset(prop, 0, sizeof(prop));
6259 property_get("persist.camera.hal3hfr.enable", prop, "1");
6260 uint8_t hfrEnable = (uint8_t)atoi(prop);
6261
6262 if(hfrEnable && available_hfr_configs.array()) {
6263 staticInfo.update(
6264 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
6265 available_hfr_configs.array(), available_hfr_configs.size());
6266 }
6267
6268 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
6269 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
6270 &max_jpeg_size, 1);
6271
6272 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
6273 size_t size = 0;
6274 count = CAM_EFFECT_MODE_MAX;
6275 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
6276 for (size_t i = 0; i < count; i++) {
6277 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6278 gCamCapability[cameraId]->supported_effects[i]);
6279 if (NAME_NOT_FOUND != val) {
6280 avail_effects[size] = (uint8_t)val;
6281 size++;
6282 }
6283 }
6284 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
6285 avail_effects,
6286 size);
6287
6288 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
6289 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
6290 size_t supported_scene_modes_cnt = 0;
6291 count = CAM_SCENE_MODE_MAX;
6292 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
6293 for (size_t i = 0; i < count; i++) {
6294 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
6295 CAM_SCENE_MODE_OFF) {
6296 int val = lookupFwkName(SCENE_MODES_MAP,
6297 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6298 gCamCapability[cameraId]->supported_scene_modes[i]);
6299 if (NAME_NOT_FOUND != val) {
6300 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
6301 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
6302 supported_scene_modes_cnt++;
6303 }
6304 }
6305 }
6306 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6307 avail_scene_modes,
6308 supported_scene_modes_cnt);
6309
6310 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
6311 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
6312 supported_scene_modes_cnt,
6313 CAM_SCENE_MODE_MAX,
6314 scene_mode_overrides,
6315 supported_indexes,
6316 cameraId);
6317
6318 if (supported_scene_modes_cnt == 0) {
6319 supported_scene_modes_cnt = 1;
6320 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
6321 }
6322
6323 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
6324 scene_mode_overrides, supported_scene_modes_cnt * 3);
6325
6326 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
6327 ANDROID_CONTROL_MODE_AUTO,
6328 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
6329 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
6330 available_control_modes,
6331 3);
6332
6333 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
6334 size = 0;
6335 count = CAM_ANTIBANDING_MODE_MAX;
6336 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
6337 for (size_t i = 0; i < count; i++) {
6338 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
6339 gCamCapability[cameraId]->supported_antibandings[i]);
6340 if (NAME_NOT_FOUND != val) {
6341 avail_antibanding_modes[size] = (uint8_t)val;
6342 size++;
6343 }
6344
6345 }
6346 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
6347 avail_antibanding_modes,
6348 size);
6349
6350 uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
6351 size = 0;
6352 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
6353 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
6354 if (0 == count) {
6355 avail_abberation_modes[0] =
6356 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
6357 size++;
6358 } else {
6359 for (size_t i = 0; i < count; i++) {
6360 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6361 gCamCapability[cameraId]->aberration_modes[i]);
6362 if (NAME_NOT_FOUND != val) {
6363 avail_abberation_modes[size] = (uint8_t)val;
6364 size++;
6365 } else {
6366 ALOGE("%s: Invalid CAC mode %d", __func__,
6367 gCamCapability[cameraId]->aberration_modes[i]);
6368 break;
6369 }
6370 }
6371
6372 }
6373 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6374 avail_abberation_modes,
6375 size);
6376
6377 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
6378 size = 0;
6379 count = CAM_FOCUS_MODE_MAX;
6380 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
6381 for (size_t i = 0; i < count; i++) {
6382 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6383 gCamCapability[cameraId]->supported_focus_modes[i]);
6384 if (NAME_NOT_FOUND != val) {
6385 avail_af_modes[size] = (uint8_t)val;
6386 size++;
6387 }
6388 }
6389 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
6390 avail_af_modes,
6391 size);
6392
6393 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
6394 size = 0;
6395 count = CAM_WB_MODE_MAX;
6396 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
6397 for (size_t i = 0; i < count; i++) {
6398 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6399 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6400 gCamCapability[cameraId]->supported_white_balances[i]);
6401 if (NAME_NOT_FOUND != val) {
6402 avail_awb_modes[size] = (uint8_t)val;
6403 size++;
6404 }
6405 }
6406 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
6407 avail_awb_modes,
6408 size);
6409
6410 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
6411 count = CAM_FLASH_FIRING_LEVEL_MAX;
6412 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
6413 count);
6414 for (size_t i = 0; i < count; i++) {
6415 available_flash_levels[i] =
6416 gCamCapability[cameraId]->supported_firing_levels[i];
6417 }
6418 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
6419 available_flash_levels, count);
6420
6421 uint8_t flashAvailable;
6422 if (gCamCapability[cameraId]->flash_available)
6423 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
6424 else
6425 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
6426 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
6427 &flashAvailable, 1);
6428
6429 Vector<uint8_t> avail_ae_modes;
6430 count = CAM_AE_MODE_MAX;
6431 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
6432 for (size_t i = 0; i < count; i++) {
6433 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
6434 }
6435 if (flashAvailable) {
6436 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
6437 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
6438 }
6439 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
6440 avail_ae_modes.array(),
6441 avail_ae_modes.size());
6442
6443 int32_t sensitivity_range[2];
6444 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
6445 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
6446 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
6447 sensitivity_range,
6448 sizeof(sensitivity_range) / sizeof(int32_t));
6449
6450 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
6451 &gCamCapability[cameraId]->max_analog_sensitivity,
6452 1);
6453
6454 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
6455 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
6456 &sensor_orientation,
6457 1);
6458
6459 int32_t max_output_streams[] = {
6460 MAX_STALLING_STREAMS,
6461 MAX_PROCESSED_STREAMS,
6462 MAX_RAW_STREAMS};
6463 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
6464 max_output_streams,
6465 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
6466
6467 uint8_t avail_leds = 0;
6468 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
6469 &avail_leds, 0);
6470
6471 uint8_t focus_dist_calibrated;
6472 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
6473 gCamCapability[cameraId]->focus_dist_calibrated);
6474 if (NAME_NOT_FOUND != val) {
6475 focus_dist_calibrated = (uint8_t)val;
6476 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
6477 &focus_dist_calibrated, 1);
6478 }
6479
6480 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
6481 size = 0;
6482 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
6483 MAX_TEST_PATTERN_CNT);
6484 for (size_t i = 0; i < count; i++) {
6485 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
6486 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
6487 if (NAME_NOT_FOUND != testpatternMode) {
6488 avail_testpattern_modes[size] = testpatternMode;
6489 size++;
6490 }
6491 }
6492 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
6493 avail_testpattern_modes,
6494 size);
6495
6496 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
6497 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
6498 &max_pipeline_depth,
6499 1);
6500
6501 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
6502 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
6503 &partial_result_count,
6504 1);
6505
6506 int32_t max_stall_duration = MAX_REPROCESS_STALL;
6507 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
6508
6509 Vector<uint8_t> available_capabilities;
6510 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
6511 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
6512 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
6513 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
6514 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
6515 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
6516 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
6517 if (hfrEnable && available_hfr_configs.array()) {
6518 available_capabilities.add(
6519 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
6520 }
6521
6522 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
6523 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
6524 }
6525 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
6526 available_capabilities.array(),
6527 available_capabilities.size());
6528
6529 //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR and/or
6530 //BURST_CAPTURE.
6531 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
6532 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
6533
6534 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
6535 &aeLockAvailable, 1);
6536
6537 //awbLockAvailable to be set to true if capabilities has
6538 //MANUAL_POST_PROCESSING and/or BURST_CAPTURE.
6539 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
6540 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
6541
6542 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
6543 &awbLockAvailable, 1);
6544
6545 int32_t max_input_streams = 1;
6546 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
6547 &max_input_streams,
6548 1);
6549
6550 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
6551 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
6552 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
6553 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
6554 HAL_PIXEL_FORMAT_YCbCr_420_888};
6555 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
6556 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
6557
6558 int32_t max_latency = (limitedDevice) ?
6559 CAM_MAX_SYNC_LATENCY : ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
6560 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
6561 &max_latency,
6562 1);
6563
6564 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
6565 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
6566 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
6567 available_hot_pixel_modes,
6568 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
6569
6570 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
6571 ANDROID_SHADING_MODE_FAST,
6572 ANDROID_SHADING_MODE_HIGH_QUALITY};
6573 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
6574 available_shading_modes,
6575 3);
6576
6577 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
6578 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
6579 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
6580 available_lens_shading_map_modes,
6581 2);
6582
6583 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
6584 ANDROID_EDGE_MODE_FAST,
6585 ANDROID_EDGE_MODE_HIGH_QUALITY,
6586 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
6587 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
6588 available_edge_modes,
6589 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
6590
6591 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
6592 ANDROID_NOISE_REDUCTION_MODE_FAST,
6593 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
6594 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
6595 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
6596 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
6597 available_noise_red_modes,
6598 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
6599
6600 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
6601 ANDROID_TONEMAP_MODE_FAST,
6602 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
6603 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
6604 available_tonemap_modes,
6605 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
6606
6607 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
6608 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
6609 available_hot_pixel_map_modes,
6610 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
6611
6612 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
6613 gCamCapability[cameraId]->reference_illuminant1);
6614 if (NAME_NOT_FOUND != val) {
6615 uint8_t fwkReferenceIlluminant = (uint8_t)val;
6616 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
6617 }
6618
6619 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
6620 gCamCapability[cameraId]->reference_illuminant2);
6621 if (NAME_NOT_FOUND != val) {
6622 uint8_t fwkReferenceIlluminant = (uint8_t)val;
6623 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
6624 }
6625
6626 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
6627 (void *)gCamCapability[cameraId]->forward_matrix1,
6628 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
6629
6630 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
6631 (void *)gCamCapability[cameraId]->forward_matrix2,
6632 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
6633
6634 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
6635 (void *)gCamCapability[cameraId]->color_transform1,
6636 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
6637
6638 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
6639 (void *)gCamCapability[cameraId]->color_transform2,
6640 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
6641
6642 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
6643 (void *)gCamCapability[cameraId]->calibration_transform1,
6644 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
6645
6646 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
6647 (void *)gCamCapability[cameraId]->calibration_transform2,
6648 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
6649
6650 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
6651 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
6652 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
6653 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
6654 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
6655 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6656 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
6657 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
6658 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
6659 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
6660 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
6661 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
6662 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
6663 ANDROID_JPEG_GPS_COORDINATES,
6664 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
6665 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
6666 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
6667 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
6668 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
6669 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
6670 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
6671 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
6672 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
6673 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
6674 ANDROID_STATISTICS_FACE_DETECT_MODE,
6675 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
6676 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
6677 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
6678 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE};
6679
6680 size_t request_keys_cnt =
6681 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
6682 Vector<int32_t> available_request_keys;
6683 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
6684 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
6685 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
6686 }
6687
6688 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
6689 available_request_keys.array(), available_request_keys.size());
6690
6691 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
6692 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
6693 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
6694 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
6695 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
6696 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
6697 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
6698 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
6699 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
6700 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
6701 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
6702 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
6703 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
6704 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
6705 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6706 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
6707 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
6708 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
6709 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
6710 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6711 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
6712 ANDROID_STATISTICS_FACE_SCORES,
6713 ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL,
6714 ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
6715 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST };
6716 size_t result_keys_cnt =
6717 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
6718
6719 Vector<int32_t> available_result_keys;
6720 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
6721 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
6722 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
6723 }
6724 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
6725 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
6726 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
6727 }
6728 if (supportedFaceDetectMode == 1) {
6729 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
6730 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
6731 } else if ((supportedFaceDetectMode == 2) ||
6732 (supportedFaceDetectMode == 3)) {
6733 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
6734 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
6735 }
6736 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
6737 available_result_keys.array(), available_result_keys.size());
6738
6739 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
6740 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6741 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
6742 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
6743 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6744 ANDROID_SCALER_CROPPING_TYPE,
6745 ANDROID_SYNC_MAX_LATENCY,
6746 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6747 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6748 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6749 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
6750 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
6751 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6752 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6753 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6754 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6755 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6756 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6757 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
6758 ANDROID_LENS_FACING,
6759 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
6760 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
6761 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
6762 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
6763 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6764 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
6765 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
6766 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
6767 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
6768 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
6769 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
6770 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
6771 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6772 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6773 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6774 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6775 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
6776 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
6777 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
6778 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6779 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6780 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6781 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6782 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
6783 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
6784 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
6785 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
6786 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
6787 ANDROID_TONEMAP_MAX_CURVE_POINTS,
6788 ANDROID_CONTROL_AVAILABLE_MODES,
6789 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
6790 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
6791 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
6792 ANDROID_SHADING_AVAILABLE_MODES,
6793 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
6794
6795 Vector<int32_t> available_characteristics_keys;
6796 available_characteristics_keys.appendArray(characteristics_keys_basic,
6797 sizeof(characteristics_keys_basic)/sizeof(int32_t));
6798 if (hasBlackRegions) {
6799 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
6800 }
6801 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
6802 available_characteristics_keys.array(),
6803 available_characteristics_keys.size());
6804
6805 /*available stall durations depend on the hw + sw and will be different for different devices */
6806 /*have to add for raw after implementation*/
6807 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
6808 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
6809
6810 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6811 size_t raw_count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt,
6812 MAX_SIZES_CNT);
6813 size_t available_stall_size = count * 4;
6814 int64_t available_stall_durations[available_stall_size];
6815 idx = 0;
6816 for (uint32_t j = 0; j < stall_formats_count; j++) {
6817 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
6818 for (uint32_t i = 0; i < count; i++) {
6819 available_stall_durations[idx] = stall_formats[j];
6820 available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
6821 available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
6822 available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
6823 idx+=4;
6824 }
6825 } else {
6826 for (uint32_t i = 0; i < raw_count; i++) {
6827 available_stall_durations[idx] = stall_formats[j];
6828 available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
6829 available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
6830 available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
6831 idx+=4;
6832 }
6833 }
6834 }
6835 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
6836 available_stall_durations,
6837 idx);
6838 //QCAMERA3_OPAQUE_RAW
6839 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6840 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6841 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
6842 case LEGACY_RAW:
6843 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6844 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
6845 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6846 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6847 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6848 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
6849 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6850 break;
6851 case MIPI_RAW:
6852 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6853 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
6854 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6855 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
6856 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6857 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
6858 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
6859 break;
6860 default:
6861 ALOGE("%s: unknown opaque_raw_format %d", __func__,
6862 gCamCapability[cameraId]->opaque_raw_fmt);
6863 break;
6864 }
6865 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
6866
6867 int32_t strides[3*raw_count];
6868 for (size_t i = 0; i < raw_count; i++) {
6869 cam_stream_buf_plane_info_t buf_planes;
6870 strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
6871 strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
6872 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
6873 &gCamCapability[cameraId]->padding_info, &buf_planes);
6874 strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
6875 }
6876 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
6877 3*raw_count);
6878
6879 gStaticMetadata[cameraId] = staticInfo.release();
6880 return rc;
6881 }
6882
6883 /*===========================================================================
6884 * FUNCTION : makeTable
6885 *
6886 * DESCRIPTION: make a table of sizes
6887 *
6888 * PARAMETERS :
6889 *
6890 *
6891 *==========================================================================*/
makeTable(cam_dimension_t * dimTable,size_t size,size_t max_size,int32_t * sizeTable)6892 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
6893 size_t max_size, int32_t *sizeTable)
6894 {
6895 size_t j = 0;
6896 if (size > max_size) {
6897 size = max_size;
6898 }
6899 for (size_t i = 0; i < size; i++) {
6900 sizeTable[j] = dimTable[i].width;
6901 sizeTable[j+1] = dimTable[i].height;
6902 j+=2;
6903 }
6904 }
6905
6906 /*===========================================================================
6907 * FUNCTION : makeFPSTable
6908 *
6909 * DESCRIPTION: make a table of fps ranges
6910 *
6911 * PARAMETERS :
6912 *
6913 *==========================================================================*/
makeFPSTable(cam_fps_range_t * fpsTable,size_t size,size_t max_size,int32_t * fpsRangesTable)6914 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
6915 size_t max_size, int32_t *fpsRangesTable)
6916 {
6917 size_t j = 0;
6918 if (size > max_size) {
6919 size = max_size;
6920 }
6921 for (size_t i = 0; i < size; i++) {
6922 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
6923 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
6924 j+=2;
6925 }
6926 }
6927
6928 /*===========================================================================
6929 * FUNCTION : makeOverridesList
6930 *
6931 * DESCRIPTION: make a list of scene mode overrides
6932 *
6933 * PARAMETERS :
6934 *
6935 *
6936 *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(
        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    // Output layout: one [AE, AWB, AF] byte triplet per supported scene mode,
    // written sequentially at offset j. supported_indexes[i] maps the i-th
    // framework scene mode back to its row in the daemon's overridesTable.
    size_t j = 0;
    if (size > max_size) {
        // Clamp to the capacity the caller allocated for.
        size = max_size;
    }
    // Number of focus modes this sensor actually supports, bounded by the
    // enum size so the supported_focus_modes[] scan below stays in range.
    size_t focus_count = CAM_FOCUS_MODE_MAX;
    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
            focus_count);
    for (size_t i = 0; i < size; i++) {
        bool supt = false;
        size_t index = supported_indexes[i];
        // AE override: prefer auto-flash whenever the unit has a flash.
        overridesList[j] = gCamCapability[camera_id]->flash_available ?
                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
        // AWB override: translate HAL awb_mode to the framework enum.
        // NOTE(review): on NAME_NOT_FOUND the slot is left untouched —
        // presumably the caller pre-initializes overridesList; verify.
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                overridesTable[index].awb_mode);
        if (NAME_NOT_FOUND != val) {
            overridesList[j+1] = (uint8_t)val;
        }
        // AF override: only advertise the daemon's focus mode if the sensor
        // actually supports it; otherwise report AF OFF (e.g. fixed-focus).
        uint8_t focus_override = overridesTable[index].af_mode;
        for (size_t k = 0; k < focus_count; k++) {
            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
                supt = true;
                break;
            }
        }
        if (supt) {
            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                    focus_override);
            if (NAME_NOT_FOUND != val) {
                // Same caveat as AWB: slot untouched when the lookup fails.
                overridesList[j+2] = (uint8_t)val;
            }
        } else {
            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}
6981
6982 /*===========================================================================
6983 * FUNCTION : filterJpegSizes
6984 *
6985 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
6986 * could be downscaled to
6987 *
6988 * PARAMETERS :
6989 *
6990 * RETURN : length of jpegSizes array
6991 *==========================================================================*/
6992
filterJpegSizes(int32_t * jpegSizes,int32_t * processedSizes,size_t processedSizesCnt,size_t maxCount,cam_rect_t active_array_size,uint8_t downscale_factor)6993 size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
6994 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
6995 uint8_t downscale_factor)
6996 {
6997 if (0 == downscale_factor) {
6998 downscale_factor = 1;
6999 }
7000
7001 int32_t min_width = active_array_size.width / downscale_factor;
7002 int32_t min_height = active_array_size.height / downscale_factor;
7003 size_t jpegSizesCnt = 0;
7004 if (processedSizesCnt > maxCount) {
7005 processedSizesCnt = maxCount;
7006 }
7007 for (size_t i = 0; i < processedSizesCnt; i+=2) {
7008 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
7009 jpegSizes[jpegSizesCnt] = processedSizes[i];
7010 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
7011 jpegSizesCnt += 2;
7012 }
7013 }
7014 return jpegSizesCnt;
7015 }
7016
7017 /*===========================================================================
7018 * FUNCTION : getPreviewHalPixelFormat
7019 *
7020 * DESCRIPTION: convert the format to type recognized by framework
7021 *
7022 * PARAMETERS : format : the format from backend
7023 *
 * RETURN     : format recognized by framework
7025 *
7026 *==========================================================================*/
getScalarFormat(int32_t format)7027 int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
7028 {
7029 int32_t halPixelFormat;
7030
7031 switch (format) {
7032 case CAM_FORMAT_YUV_420_NV12:
7033 halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
7034 break;
7035 case CAM_FORMAT_YUV_420_NV21:
7036 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
7037 break;
7038 case CAM_FORMAT_YUV_420_NV21_ADRENO:
7039 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
7040 break;
7041 case CAM_FORMAT_YUV_420_YV12:
7042 halPixelFormat = HAL_PIXEL_FORMAT_YV12;
7043 break;
7044 case CAM_FORMAT_YUV_422_NV16:
7045 case CAM_FORMAT_YUV_422_NV61:
7046 default:
7047 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
7048 break;
7049 }
7050 return halPixelFormat;
7051 }
7052
7053 /*===========================================================================
7054 * FUNCTION : computeNoiseModelEntryS
7055 *
7056 * DESCRIPTION: function to map a given sensitivity to the S noise
7057 * model parameters in the DNG noise model.
7058 *
7059 * PARAMETERS : sens : the sensor sensitivity
7060 *
 * RETURN     : S (sensor amplification) noise
7062 *
7063 *==========================================================================*/
computeNoiseModelEntryS(int32_t sens)7064 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
7065 double s = gCamCapability[mCameraId]->gradient_S * sens +
7066 gCamCapability[mCameraId]->offset_S;
7067 return ((s < 0.0) ? 0.0 : s);
7068 }
7069
7070 /*===========================================================================
7071 * FUNCTION : computeNoiseModelEntryO
7072 *
7073 * DESCRIPTION: function to map a given sensitivity to the O noise
7074 * model parameters in the DNG noise model.
7075 *
7076 * PARAMETERS : sens : the sensor sensitivity
7077 *
 * RETURN     : O (sensor readout) noise
7079 *
7080 *==========================================================================*/
computeNoiseModelEntryO(int32_t sens)7081 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
7082 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
7083 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
7084 1.0 : (1.0 * sens / max_analog_sens);
7085 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
7086 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
7087 return ((o < 0.0) ? 0.0 : o);
7088 }
7089
7090 /*===========================================================================
7091 * FUNCTION : getSensorSensitivity
7092 *
7093 * DESCRIPTION: convert iso_mode to an integer value
7094 *
7095 * PARAMETERS : iso_mode : the iso_mode supported by sensor
7096 *
 * RETURN     : sensitivity supported by sensor
7098 *
7099 *==========================================================================*/
getSensorSensitivity(int32_t iso_mode)7100 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
7101 {
7102 int32_t sensitivity;
7103
7104 switch (iso_mode) {
7105 case CAM_ISO_MODE_100:
7106 sensitivity = 100;
7107 break;
7108 case CAM_ISO_MODE_200:
7109 sensitivity = 200;
7110 break;
7111 case CAM_ISO_MODE_400:
7112 sensitivity = 400;
7113 break;
7114 case CAM_ISO_MODE_800:
7115 sensitivity = 800;
7116 break;
7117 case CAM_ISO_MODE_1600:
7118 sensitivity = 1600;
7119 break;
7120 default:
7121 sensitivity = -1;
7122 break;
7123 }
7124 return sensitivity;
7125 }
7126
7127 /*===========================================================================
7128 * FUNCTION : getCamInfo
7129 *
7130 * DESCRIPTION: query camera capabilities
7131 *
7132 * PARAMETERS :
7133 * @cameraId : camera Id
7134 * @info : camera info struct to be filled in with camera capabilities
7135 *
7136 * RETURN : int type of status
7137 * NO_ERROR -- success
7138 * none-zero failure code
7139 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    // gCamLock guards the lazily-initialized global capability and static
    // metadata caches. Every early-return path below must unlock it.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        // First query for this camera: fetch capabilities from the backend.
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        // Build the static characteristics metadata once per camera.
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Translate the HAL's mount position into the framework's facing enum.
    // An unknown position sets rc but still fills in the remaining fields.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // Find the highest advertised max fps across all fps ranges.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    // Worst-case pixel throughput: all processed streams at full active
    // array resolution at max fps, relative to the CPP's pixel bandwidth.
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    ALOGI("%s: camera %d resource cost is %d", __func__, cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
7207
7208 /*===========================================================================
7209 * FUNCTION : translateCapabilityToMetadata
7210 *
7211 * DESCRIPTION: translate the capability into camera_metadata_t
7212 *
7213 * PARAMETERS : type of the request
7214 *
7215 *
7216 * RETURN : success: camera_metadata_t*
7217 * failure: NULL
7218 *
7219 *==========================================================================*/
translateCapabilityToMetadata(int type)7220 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
7221 {
7222 if (mDefaultMetadata[type] != NULL) {
7223 return mDefaultMetadata[type];
7224 }
7225 //first time we are handling this request
7226 //fill up the metadata structure using the wrapper class
7227 CameraMetadata settings;
7228 //translate from cam_capability_t to camera_metadata_tag_t
7229 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
7230 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
7231 int32_t defaultRequestID = 0;
7232 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
7233
7234 /* OIS disable */
7235 char ois_prop[PROPERTY_VALUE_MAX];
7236 memset(ois_prop, 0, sizeof(ois_prop));
7237 property_get("persist.camera.ois.disable", ois_prop, "0");
7238 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
7239
7240 /* Force video to use OIS */
7241 char videoOisProp[PROPERTY_VALUE_MAX];
7242 memset(videoOisProp, 0, sizeof(videoOisProp));
7243 property_get("persist.camera.ois.video", videoOisProp, "1");
7244 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
7245
7246 // EIS enable/disable
7247 char eis_prop[PROPERTY_VALUE_MAX];
7248 memset(eis_prop, 0, sizeof(eis_prop));
7249 property_get("persist.camera.eis.enable", eis_prop, "0");
7250 const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
7251
7252 // Hybrid AE enable/disable
7253 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
7254 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
7255 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
7256 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
7257
7258 const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
7259 // This is a bit hacky. EIS is enabled only when the above setprop
7260 // is set to non-zero value and on back camera (for 2015 Nexus).
7261 // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
7262 // configureStream is called before this function. In other words,
7263 // we cannot guarantee the app will call configureStream before
7264 // calling createDefaultRequest.
7265 const bool eisEnabled = facingBack && eis_prop_set;
7266
7267 uint8_t controlIntent = 0;
7268 uint8_t focusMode;
7269 uint8_t vsMode;
7270 uint8_t optStabMode;
7271 uint8_t cacMode;
7272 uint8_t edge_mode;
7273 uint8_t noise_red_mode;
7274 uint8_t tonemap_mode;
7275 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7276 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7277 switch (type) {
7278 case CAMERA3_TEMPLATE_PREVIEW:
7279 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
7280 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7281 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7282 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7283 edge_mode = ANDROID_EDGE_MODE_FAST;
7284 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7285 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7286 break;
7287 case CAMERA3_TEMPLATE_STILL_CAPTURE:
7288 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
7289 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7290 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7291 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
7292 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
7293 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
7294 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
7295 break;
7296 case CAMERA3_TEMPLATE_VIDEO_RECORD:
7297 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
7298 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7299 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7300 if (eisEnabled) {
7301 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7302 }
7303 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7304 edge_mode = ANDROID_EDGE_MODE_FAST;
7305 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7306 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7307 if (forceVideoOis)
7308 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7309 break;
7310 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7311 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
7312 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7313 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7314 if (eisEnabled) {
7315 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7316 }
7317 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7318 edge_mode = ANDROID_EDGE_MODE_FAST;
7319 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7320 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7321 if (forceVideoOis)
7322 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7323 break;
7324 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
7325 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
7326 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7327 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7328 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7329 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
7330 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
7331 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7332 break;
7333 case CAMERA3_TEMPLATE_MANUAL:
7334 edge_mode = ANDROID_EDGE_MODE_FAST;
7335 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7336 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7337 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7338 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
7339 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7340 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7341 break;
7342 default:
7343 edge_mode = ANDROID_EDGE_MODE_FAST;
7344 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7345 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7346 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7347 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
7348 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7349 break;
7350 }
7351 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
7352 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
7353 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
7354 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
7355 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7356 }
7357 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
7358
7359 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7360 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
7361 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7362 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7363 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
7364 || ois_disable)
7365 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7366 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
7367
7368 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7369 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
7370
7371 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
7372 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
7373
7374 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
7375 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
7376
7377 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
7378 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
7379
7380 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
7381 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
7382
7383 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
7384 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
7385
7386 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
7387 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
7388
7389 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
7390 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
7391
7392 /*flash*/
7393 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
7394 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
7395
7396 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
7397 settings.update(ANDROID_FLASH_FIRING_POWER,
7398 &flashFiringLevel, 1);
7399
7400 /* lens */
7401 float default_aperture = gCamCapability[mCameraId]->apertures[0];
7402 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
7403
7404 if (gCamCapability[mCameraId]->filter_densities_count) {
7405 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
7406 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
7407 gCamCapability[mCameraId]->filter_densities_count);
7408 }
7409
7410 float default_focal_length = gCamCapability[mCameraId]->focal_length;
7411 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
7412
7413 float default_focus_distance = 0;
7414 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
7415
7416 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
7417 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
7418
7419 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7420 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7421
7422 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
7423 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
7424
7425 /* face detection (default to OFF) */
7426 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
7427 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
7428
7429 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
7430 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
7431
7432 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
7433 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
7434
7435 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7436 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7437
7438 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7439 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
7440
7441 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7442 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
7443
7444 /* Exposure time(Update the Min Exposure Time)*/
7445 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
7446 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
7447
7448 /* frame duration */
7449 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
7450 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
7451
7452 /* sensitivity */
7453 static const int32_t default_sensitivity = 100;
7454 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
7455
7456 /*edge mode*/
7457 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
7458
7459 /*noise reduction mode*/
7460 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
7461
7462 /*color correction mode*/
7463 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
7464 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
7465
7466 /*transform matrix mode*/
7467 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
7468
7469 int32_t scaler_crop_region[4];
7470 scaler_crop_region[0] = 0;
7471 scaler_crop_region[1] = 0;
7472 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
7473 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
7474 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
7475
7476 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
7477 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
7478
7479 /*focus distance*/
7480 float focus_distance = 0.0;
7481 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
7482
7483 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
7484 float max_range = 0.0;
7485 float max_fixed_fps = 0.0;
7486 int32_t fps_range[2] = {0, 0};
7487 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
7488 i++) {
7489 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
7490 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7491 if (type == CAMERA3_TEMPLATE_PREVIEW ||
7492 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
7493 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
7494 if (range > max_range) {
7495 fps_range[0] =
7496 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7497 fps_range[1] =
7498 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7499 max_range = range;
7500 }
7501 } else {
7502 if (range < 0.01 && max_fixed_fps <
7503 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
7504 fps_range[0] =
7505 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7506 fps_range[1] =
7507 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7508 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7509 }
7510 }
7511 }
7512 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
7513
7514 /*precapture trigger*/
7515 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
7516 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
7517
7518 /*af trigger*/
7519 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
7520 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
7521
7522 /* ae & af regions */
7523 int32_t active_region[] = {
7524 gCamCapability[mCameraId]->active_array_size.left,
7525 gCamCapability[mCameraId]->active_array_size.top,
7526 gCamCapability[mCameraId]->active_array_size.left +
7527 gCamCapability[mCameraId]->active_array_size.width,
7528 gCamCapability[mCameraId]->active_array_size.top +
7529 gCamCapability[mCameraId]->active_array_size.height,
7530 0};
7531 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
7532 sizeof(active_region) / sizeof(active_region[0]));
7533 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
7534 sizeof(active_region) / sizeof(active_region[0]));
7535
7536 /* black level lock */
7537 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7538 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
7539
7540 /* lens shading map mode */
7541 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7542 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
7543 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
7544 }
7545 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
7546
7547 //special defaults for manual template
7548 if (type == CAMERA3_TEMPLATE_MANUAL) {
7549 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
7550 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
7551
7552 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
7553 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
7554
7555 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
7556 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
7557
7558 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
7559 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
7560
7561 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
7562 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
7563
7564 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
7565 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
7566 }
7567
7568
7569 /* TNR
7570 * We'll use this location to determine which modes TNR will be set.
7571 * We will enable TNR to be on if either of the Preview/Video stream requires TNR
7572 * This is not to be confused with linking on a per stream basis that decision
7573 * is still on per-session basis and will be handled as part of config stream
7574 */
7575 uint8_t tnr_enable = 0;
7576
7577 if (m_bTnrPreview || m_bTnrVideo) {
7578
7579 switch (type) {
7580 case CAMERA3_TEMPLATE_VIDEO_RECORD:
7581 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7582 tnr_enable = 1;
7583 break;
7584
7585 default:
7586 tnr_enable = 0;
7587 break;
7588 }
7589
7590 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
7591 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7592 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7593
7594 CDBG("%s: TNR:%d with process plate %d for template:%d",
7595 __func__, tnr_enable, tnr_process_type, type);
7596 }
7597
7598 /* CDS default */
7599 char prop[PROPERTY_VALUE_MAX];
7600 memset(prop, 0, sizeof(prop));
7601 property_get("persist.camera.CDS", prop, "Auto");
7602 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
7603 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
7604 if (CAM_CDS_MODE_MAX == cds_mode) {
7605 cds_mode = CAM_CDS_MODE_AUTO;
7606 }
7607 m_CdsPreference = cds_mode;
7608
7609 /* Disabling CDS in templates which have TNR enabled*/
7610 if (tnr_enable)
7611 cds_mode = CAM_CDS_MODE_OFF;
7612
7613 int32_t mode = cds_mode;
7614 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
7615
7616 /* hybrid ae */
7617 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
7618
7619 mDefaultMetadata[type] = settings.release();
7620
7621 return mDefaultMetadata[type];
7622 }
7623
7624 /*===========================================================================
7625 * FUNCTION : setFrameParameters
7626 *
7627 * DESCRIPTION: set parameters per frame as requested in the metadata from
7628 * framework
7629 *
7630 * PARAMETERS :
7631 * @request : request that needs to be serviced
7632 * @streamID : Stream ID of all the requested streams
7633 * @blob_request: Whether this request is a blob request or not
7634 *
7635 * RETURN : success: NO_ERROR
7636 * failure:
7637 *==========================================================================*/
setFrameParameters(camera3_capture_request_t * request,cam_stream_ID_t streamID,int blob_request,uint32_t snapshotStreamId)7638 int QCamera3HardwareInterface::setFrameParameters(
7639 camera3_capture_request_t *request,
7640 cam_stream_ID_t streamID,
7641 int blob_request,
7642 uint32_t snapshotStreamId)
7643 {
7644 /*translate from camera_metadata_t type to parm_type_t*/
7645 int rc = 0;
7646 int32_t hal_version = CAM_HAL_V3;
7647
7648 clear_metadata_buffer(mParameters);
7649 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
7650 ALOGE("%s: Failed to set hal version in the parameters", __func__);
7651 return BAD_VALUE;
7652 }
7653
7654 /*we need to update the frame number in the parameters*/
7655 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
7656 request->frame_number)) {
7657 ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7658 return BAD_VALUE;
7659 }
7660
7661 /* Update stream id of all the requested buffers */
7662 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
7663 ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
7664 return BAD_VALUE;
7665 }
7666
7667 if (mUpdateDebugLevel) {
7668 uint32_t dummyDebugLevel = 0;
7669 /* The value of dummyDebugLevel is irrelavent. On
7670 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
7671 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
7672 dummyDebugLevel)) {
7673 ALOGE("%s: Failed to set UPDATE_DEBUG_LEVEL", __func__);
7674 return BAD_VALUE;
7675 }
7676 mUpdateDebugLevel = false;
7677 }
7678
7679 if(request->settings != NULL){
7680 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
7681 if (blob_request)
7682 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
7683 }
7684
7685 return rc;
7686 }
7687
7688 /*===========================================================================
7689 * FUNCTION : setReprocParameters
7690 *
7691 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
7692 * return it.
7693 *
7694 * PARAMETERS :
7695 * @request : request that needs to be serviced
7696 *
7697 * RETURN : success: NO_ERROR
7698 * failure:
7699 *==========================================================================*/
setReprocParameters(camera3_capture_request_t * request,metadata_buffer_t * reprocParam,uint32_t snapshotStreamId)7700 int32_t QCamera3HardwareInterface::setReprocParameters(
7701 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
7702 uint32_t snapshotStreamId)
7703 {
7704 /*translate from camera_metadata_t type to parm_type_t*/
7705 int rc = 0;
7706
7707 if (NULL == request->settings){
7708 ALOGE("%s: Reprocess settings cannot be NULL", __func__);
7709 return BAD_VALUE;
7710 }
7711
7712 if (NULL == reprocParam) {
7713 ALOGE("%s: Invalid reprocessing metadata buffer", __func__);
7714 return BAD_VALUE;
7715 }
7716 clear_metadata_buffer(reprocParam);
7717
7718 /*we need to update the frame number in the parameters*/
7719 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
7720 request->frame_number)) {
7721 ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7722 return BAD_VALUE;
7723 }
7724
7725 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
7726 if (rc < 0) {
7727 ALOGE("%s: Failed to translate reproc request", __func__);
7728 return rc;
7729 }
7730
7731 CameraMetadata frame_settings;
7732 frame_settings = request->settings;
7733 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
7734 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
7735 int32_t *crop_count =
7736 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
7737 int32_t *crop_data =
7738 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
7739 int32_t *roi_map =
7740 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
7741 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
7742 cam_crop_data_t crop_meta;
7743 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
7744 crop_meta.num_of_streams = 1;
7745 crop_meta.crop_info[0].crop.left = crop_data[0];
7746 crop_meta.crop_info[0].crop.top = crop_data[1];
7747 crop_meta.crop_info[0].crop.width = crop_data[2];
7748 crop_meta.crop_info[0].crop.height = crop_data[3];
7749
7750 crop_meta.crop_info[0].roi_map.left =
7751 roi_map[0];
7752 crop_meta.crop_info[0].roi_map.top =
7753 roi_map[1];
7754 crop_meta.crop_info[0].roi_map.width =
7755 roi_map[2];
7756 crop_meta.crop_info[0].roi_map.height =
7757 roi_map[3];
7758
7759 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
7760 rc = BAD_VALUE;
7761 }
7762 CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d",
7763 __func__,
7764 request->input_buffer->stream,
7765 crop_meta.crop_info[0].crop.left,
7766 crop_meta.crop_info[0].crop.top,
7767 crop_meta.crop_info[0].crop.width,
7768 crop_meta.crop_info[0].crop.height);
7769 CDBG("%s: Found reprocess roi map data for stream %p %dx%d, %dx%d",
7770 __func__,
7771 request->input_buffer->stream,
7772 crop_meta.crop_info[0].roi_map.left,
7773 crop_meta.crop_info[0].roi_map.top,
7774 crop_meta.crop_info[0].roi_map.width,
7775 crop_meta.crop_info[0].roi_map.height);
7776 } else {
7777 ALOGE("%s: Invalid reprocess crop count %d!", __func__, *crop_count);
7778 }
7779 } else {
7780 ALOGE("%s: No crop data from matching output stream", __func__);
7781 }
7782
7783 /* These settings are not needed for regular requests so handle them specially for
7784 reprocess requests; information needed for EXIF tags */
7785 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
7786 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
7787 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7788 if (NAME_NOT_FOUND != val) {
7789 uint32_t flashMode = (uint32_t)val;
7790 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
7791 rc = BAD_VALUE;
7792 }
7793 } else {
7794 ALOGE("%s: Could not map fwk flash mode %d to correct hal flash mode", __func__,
7795 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7796 }
7797 } else {
7798 CDBG_HIGH("%s: No flash mode in reprocess settings", __func__);
7799 }
7800
7801 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
7802 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
7803 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
7804 rc = BAD_VALUE;
7805 }
7806 } else {
7807 CDBG_HIGH("%s: No flash state in reprocess settings", __func__);
7808 }
7809
7810 return rc;
7811 }
7812
7813 /*===========================================================================
7814 * FUNCTION : saveRequestSettings
7815 *
7816 * DESCRIPTION: Add any settings that might have changed to the request settings
7817 * and save the settings to be applied on the frame
7818 *
7819 * PARAMETERS :
7820 * @jpegMetadata : the extracted and/or modified jpeg metadata
7821 * @request : request with initial settings
7822 *
7823 * RETURN :
7824 * camera_metadata_t* : pointer to the saved request settings
7825 *==========================================================================*/
saveRequestSettings(const CameraMetadata & jpegMetadata,camera3_capture_request_t * request)7826 camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
7827 const CameraMetadata &jpegMetadata,
7828 camera3_capture_request_t *request)
7829 {
7830 camera_metadata_t *resultMetadata;
7831 CameraMetadata camMetadata;
7832 camMetadata = request->settings;
7833
7834 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7835 int32_t thumbnail_size[2];
7836 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7837 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7838 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
7839 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7840 }
7841
7842 resultMetadata = camMetadata.release();
7843 return resultMetadata;
7844 }
7845
7846 /*===========================================================================
7847 * FUNCTION : setHalFpsRange
7848 *
7849 * DESCRIPTION: set FPS range parameter
7850 *
7851 *
7852 * PARAMETERS :
7853 * @settings : Metadata from framework
7854 * @hal_metadata: Metadata buffer
7855 *
7856 *
7857 * RETURN : success: NO_ERROR
7858 * failure:
7859 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): ANDROID_CONTROL_AE_TARGET_FPS_RANGE is read without an
    // exists() check here; the caller is expected to verify the tag is
    // present in 'settings' before invoking this function.
    fps_range.min_fps = (float)
        settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
        settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Default the sensor (video) fps range to the AE target range; it is
    // overridden below for constrained high-speed (HFR) sessions.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    CDBG("%s: aeTargetFpsRange fps: [%f %f]", __func__,
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *        YES        |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Side effect: mBatchSize is recomputed on every call (0 when not in
    // HFR batch mode).
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // In constrained high-speed mode the sensor streams at max fps:
        // pin both the preview min and the video min up to video_max_fps.
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the requested max fps onto a discrete HFR mode enum.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch size = sensor fps / preview fps, capped at the
                // maximum batch size the backend supports.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
            }
            CDBG("%s: hfrMode: %d batchSize: %d", __func__, hfrMode, mBatchSize);

        }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Finally push the (possibly adjusted) fps range itself into the batch.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    CDBG("%s: fps: [%f %f] vid_fps: [%f %f]", __func__, fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
7953
7954 /*===========================================================================
7955 * FUNCTION : translateToHalMetadata
7956 *
7957 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
7958 *
7959 *
7960 * PARAMETERS :
7961 * @request : request sent from framework
7962 *
7963 *
7964 * RETURN : success: NO_ERROR
7965 * failure:
7966 *==========================================================================*/
translateToHalMetadata(const camera3_capture_request_t * request,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId)7967 int QCamera3HardwareInterface::translateToHalMetadata
7968 (const camera3_capture_request_t *request,
7969 metadata_buffer_t *hal_metadata,
7970 uint32_t snapshotStreamId)
7971 {
7972 int rc = 0;
7973 CameraMetadata frame_settings;
7974 frame_settings = request->settings;
7975
7976 /* Do not change the order of the following list unless you know what you are
7977 * doing.
7978 * The order is laid out in such a way that parameters in the front of the table
7979 * may be used to override the parameters later in the table. Examples are:
7980 * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
7982 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
     * 4. Any mode should precede its corresponding settings
7984 */
7985 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
7986 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
7987 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
7988 rc = BAD_VALUE;
7989 }
7990 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
7991 if (rc != NO_ERROR) {
7992 ALOGE("%s: extractSceneMode failed", __func__);
7993 }
7994 }
7995
7996 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
7997 uint8_t fwk_aeMode =
7998 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
7999 uint8_t aeMode;
8000 int32_t redeye;
8001
8002 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
8003 aeMode = CAM_AE_MODE_OFF;
8004 } else {
8005 aeMode = CAM_AE_MODE_ON;
8006 }
8007 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
8008 redeye = 1;
8009 } else {
8010 redeye = 0;
8011 }
8012
8013 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8014 fwk_aeMode);
8015 if (NAME_NOT_FOUND != val) {
8016 int32_t flashMode = (int32_t)val;
8017 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
8018 }
8019
8020 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
8021 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
8022 rc = BAD_VALUE;
8023 }
8024 }
8025
8026 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
8027 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
8028 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8029 fwk_whiteLevel);
8030 if (NAME_NOT_FOUND != val) {
8031 uint8_t whiteLevel = (uint8_t)val;
8032 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
8033 rc = BAD_VALUE;
8034 }
8035 }
8036 }
8037
8038 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
8039 uint8_t fwk_cacMode =
8040 frame_settings.find(
8041 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
8042 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8043 fwk_cacMode);
8044 if (NAME_NOT_FOUND != val) {
8045 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
8046 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
8047 rc = BAD_VALUE;
8048 }
8049 } else {
8050 ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
8051 }
8052 }
8053
8054 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
8055 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
8056 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8057 fwk_focusMode);
8058 if (NAME_NOT_FOUND != val) {
8059 uint8_t focusMode = (uint8_t)val;
8060 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
8061 rc = BAD_VALUE;
8062 }
8063 }
8064 }
8065
8066 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
8067 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
8068 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
8069 focalDistance)) {
8070 rc = BAD_VALUE;
8071 }
8072 }
8073
8074 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
8075 uint8_t fwk_antibandingMode =
8076 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
8077 int val = lookupHalName(ANTIBANDING_MODES_MAP,
8078 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
8079 if (NAME_NOT_FOUND != val) {
8080 uint32_t hal_antibandingMode = (uint32_t)val;
8081 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
8082 hal_antibandingMode)) {
8083 rc = BAD_VALUE;
8084 }
8085 }
8086 }
8087
8088 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
8089 int32_t expCompensation = frame_settings.find(
8090 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
8091 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
8092 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
8093 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
8094 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
8095 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
8096 expCompensation)) {
8097 rc = BAD_VALUE;
8098 }
8099 }
8100
8101 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
8102 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
8103 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
8104 rc = BAD_VALUE;
8105 }
8106 }
8107 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
8108 rc = setHalFpsRange(frame_settings, hal_metadata);
8109 if (rc != NO_ERROR) {
8110 ALOGE("%s: setHalFpsRange failed", __func__);
8111 }
8112 }
8113
8114 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
8115 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
8116 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
8117 rc = BAD_VALUE;
8118 }
8119 }
8120
8121 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
8122 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
8123 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8124 fwk_effectMode);
8125 if (NAME_NOT_FOUND != val) {
8126 uint8_t effectMode = (uint8_t)val;
8127 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
8128 rc = BAD_VALUE;
8129 }
8130 }
8131 }
8132
8133 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
8134 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
8135 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
8136 colorCorrectMode)) {
8137 rc = BAD_VALUE;
8138 }
8139 }
8140
8141 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
8142 cam_color_correct_gains_t colorCorrectGains;
8143 for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
8144 colorCorrectGains.gains[i] =
8145 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
8146 }
8147 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
8148 colorCorrectGains)) {
8149 rc = BAD_VALUE;
8150 }
8151 }
8152
8153 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
8154 cam_color_correct_matrix_t colorCorrectTransform;
8155 cam_rational_type_t transform_elem;
8156 size_t num = 0;
8157 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
8158 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
8159 transform_elem.numerator =
8160 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
8161 transform_elem.denominator =
8162 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
8163 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
8164 num++;
8165 }
8166 }
8167 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
8168 colorCorrectTransform)) {
8169 rc = BAD_VALUE;
8170 }
8171 }
8172
8173 cam_trigger_t aecTrigger;
8174 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
8175 aecTrigger.trigger_id = -1;
8176 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
8177 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
8178 aecTrigger.trigger =
8179 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
8180 aecTrigger.trigger_id =
8181 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
8182 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
8183 aecTrigger)) {
8184 rc = BAD_VALUE;
8185 }
8186 CDBG("%s: precaptureTrigger: %d precaptureTriggerID: %d", __func__,
8187 aecTrigger.trigger, aecTrigger.trigger_id);
8188 }
8189
8190 /*af_trigger must come with a trigger id*/
8191 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
8192 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
8193 cam_trigger_t af_trigger;
8194 af_trigger.trigger =
8195 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
8196 af_trigger.trigger_id =
8197 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
8198 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
8199 rc = BAD_VALUE;
8200 }
8201 CDBG("%s: AfTrigger: %d AfTriggerID: %d", __func__,
8202 af_trigger.trigger, af_trigger.trigger_id);
8203 }
8204
8205 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
8206 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
8207 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
8208 rc = BAD_VALUE;
8209 }
8210 }
8211 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
8212 cam_edge_application_t edge_application;
8213 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
8214 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
8215 edge_application.sharpness = 0;
8216 } else {
8217 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
8218 }
8219 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
8220 rc = BAD_VALUE;
8221 }
8222 }
8223
8224 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8225 int32_t respectFlashMode = 1;
8226 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
8227 uint8_t fwk_aeMode =
8228 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
8229 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
8230 respectFlashMode = 0;
8231 CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
8232 __func__);
8233 }
8234 }
8235 if (respectFlashMode) {
8236 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8237 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8238 CDBG_HIGH("%s: flash mode after mapping %d", __func__, val);
8239 // To check: CAM_INTF_META_FLASH_MODE usage
8240 if (NAME_NOT_FOUND != val) {
8241 uint8_t flashMode = (uint8_t)val;
8242 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
8243 rc = BAD_VALUE;
8244 }
8245 }
8246 }
8247 }
8248
8249 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
8250 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
8251 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
8252 rc = BAD_VALUE;
8253 }
8254 }
8255
8256 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
8257 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
8258 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
8259 flashFiringTime)) {
8260 rc = BAD_VALUE;
8261 }
8262 }
8263
8264 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
8265 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
8266 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
8267 hotPixelMode)) {
8268 rc = BAD_VALUE;
8269 }
8270 }
8271
8272 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
8273 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
8274 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
8275 lensAperture)) {
8276 rc = BAD_VALUE;
8277 }
8278 }
8279
8280 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
8281 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
8282 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
8283 filterDensity)) {
8284 rc = BAD_VALUE;
8285 }
8286 }
8287
8288 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
8289 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
8290 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
8291 focalLength)) {
8292 rc = BAD_VALUE;
8293 }
8294 }
8295
8296 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
8297 uint8_t optStabMode =
8298 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
8299 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
8300 optStabMode)) {
8301 rc = BAD_VALUE;
8302 }
8303 }
8304
8305 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
8306 uint8_t videoStabMode =
8307 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
8308 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
8309 videoStabMode)) {
8310 rc = BAD_VALUE;
8311 }
8312 }
8313
8314
8315 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
8316 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
8317 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
8318 noiseRedMode)) {
8319 rc = BAD_VALUE;
8320 }
8321 }
8322
8323 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
8324 float reprocessEffectiveExposureFactor =
8325 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
8326 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
8327 reprocessEffectiveExposureFactor)) {
8328 rc = BAD_VALUE;
8329 }
8330 }
8331
8332 cam_crop_region_t scalerCropRegion;
8333 bool scalerCropSet = false;
8334 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
8335 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
8336 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
8337 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
8338 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
8339
8340 // Map coordinate system from active array to sensor output.
8341 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
8342 scalerCropRegion.width, scalerCropRegion.height);
8343
8344 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
8345 scalerCropRegion)) {
8346 rc = BAD_VALUE;
8347 }
8348 scalerCropSet = true;
8349 }
8350
8351 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
8352 int64_t sensorExpTime =
8353 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
8354 CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
8355 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
8356 sensorExpTime)) {
8357 rc = BAD_VALUE;
8358 }
8359 }
8360
8361 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
8362 int64_t sensorFrameDuration =
8363 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
8364 int64_t minFrameDuration = getMinFrameDuration(request);
8365 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
8366 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
8367 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
8368 CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
8369 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
8370 sensorFrameDuration)) {
8371 rc = BAD_VALUE;
8372 }
8373 }
8374
8375 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
8376 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
8377 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
8378 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
8379 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
8380 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
8381 CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
8382 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
8383 sensorSensitivity)) {
8384 rc = BAD_VALUE;
8385 }
8386 }
8387
8388 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
8389 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
8390 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
8391 rc = BAD_VALUE;
8392 }
8393 }
8394
8395 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
8396 uint8_t fwk_facedetectMode =
8397 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
8398
8399 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
8400 fwk_facedetectMode);
8401
8402 if (NAME_NOT_FOUND != val) {
8403 uint8_t facedetectMode = (uint8_t)val;
8404 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
8405 facedetectMode)) {
8406 rc = BAD_VALUE;
8407 }
8408 }
8409 }
8410
8411 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
8412 uint8_t histogramMode =
8413 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
8414 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
8415 histogramMode)) {
8416 rc = BAD_VALUE;
8417 }
8418 }
8419
8420 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
8421 uint8_t sharpnessMapMode =
8422 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
8423 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
8424 sharpnessMapMode)) {
8425 rc = BAD_VALUE;
8426 }
8427 }
8428
8429 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
8430 uint8_t tonemapMode =
8431 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
8432 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
8433 rc = BAD_VALUE;
8434 }
8435 }
8436 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
8437 /*All tonemap channels will have the same number of points*/
8438 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
8439 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
8440 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
8441 cam_rgb_tonemap_curves tonemapCurves;
8442 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
8443 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
8444 ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
8445 __func__, tonemapCurves.tonemap_points_cnt,
8446 CAM_MAX_TONEMAP_CURVE_SIZE);
8447 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
8448 }
8449
8450 /* ch0 = G*/
8451 size_t point = 0;
8452 cam_tonemap_curve_t tonemapCurveGreen;
8453 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8454 for (size_t j = 0; j < 2; j++) {
8455 tonemapCurveGreen.tonemap_points[i][j] =
8456 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
8457 point++;
8458 }
8459 }
8460 tonemapCurves.curves[0] = tonemapCurveGreen;
8461
8462 /* ch 1 = B */
8463 point = 0;
8464 cam_tonemap_curve_t tonemapCurveBlue;
8465 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8466 for (size_t j = 0; j < 2; j++) {
8467 tonemapCurveBlue.tonemap_points[i][j] =
8468 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
8469 point++;
8470 }
8471 }
8472 tonemapCurves.curves[1] = tonemapCurveBlue;
8473
8474 /* ch 2 = R */
8475 point = 0;
8476 cam_tonemap_curve_t tonemapCurveRed;
8477 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8478 for (size_t j = 0; j < 2; j++) {
8479 tonemapCurveRed.tonemap_points[i][j] =
8480 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
8481 point++;
8482 }
8483 }
8484 tonemapCurves.curves[2] = tonemapCurveRed;
8485
8486 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
8487 tonemapCurves)) {
8488 rc = BAD_VALUE;
8489 }
8490 }
8491
8492 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
8493 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
8494 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
8495 captureIntent)) {
8496 rc = BAD_VALUE;
8497 }
8498 }
8499
8500 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
8501 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
8502 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
8503 blackLevelLock)) {
8504 rc = BAD_VALUE;
8505 }
8506 }
8507
8508 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
8509 uint8_t lensShadingMapMode =
8510 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
8511 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
8512 lensShadingMapMode)) {
8513 rc = BAD_VALUE;
8514 }
8515 }
8516
8517 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
8518 cam_area_t roi;
8519 bool reset = true;
8520 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
8521
8522 // Map coordinate system from active array to sensor output.
8523 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8524 roi.rect.height);
8525
8526 if (scalerCropSet) {
8527 reset = resetIfNeededROI(&roi, &scalerCropRegion);
8528 }
8529 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
8530 rc = BAD_VALUE;
8531 }
8532 }
8533
8534 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
8535 cam_area_t roi;
8536 bool reset = true;
8537 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
8538
8539 // Map coordinate system from active array to sensor output.
8540 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8541 roi.rect.height);
8542
8543 if (scalerCropSet) {
8544 reset = resetIfNeededROI(&roi, &scalerCropRegion);
8545 }
8546 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
8547 rc = BAD_VALUE;
8548 }
8549 }
8550
8551 if (m_bIs4KVideo) {
8552 /* Override needed for Video template in case of 4K video */
8553 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8554 CAM_INTF_PARM_CDS_MODE, m_CdsPreference)) {
8555 rc = BAD_VALUE;
8556 }
8557 } else if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
8558 frame_settings.exists(QCAMERA3_CDS_MODE)) {
8559 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
8560 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
8561 ALOGE("%s: Invalid CDS mode %d!", __func__, *fwk_cds);
8562 } else {
8563 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8564 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
8565 rc = BAD_VALUE;
8566 }
8567 }
8568 }
8569
8570 // TNR
8571 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
8572 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
8573 uint8_t b_TnrRequested = 0;
8574 cam_denoise_param_t tnr;
8575 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
8576 tnr.process_plates =
8577 (cam_denoise_process_type_t)frame_settings.find(
8578 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
8579 b_TnrRequested = tnr.denoise_enable;
8580 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
8581 rc = BAD_VALUE;
8582 }
8583 }
8584
8585 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
8586 int32_t fwk_testPatternMode =
8587 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
8588 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
8589 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
8590
8591 if (NAME_NOT_FOUND != testPatternMode) {
8592 cam_test_pattern_data_t testPatternData;
8593 memset(&testPatternData, 0, sizeof(testPatternData));
8594 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
8595 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
8596 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
8597 int32_t *fwk_testPatternData =
8598 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
8599 testPatternData.r = fwk_testPatternData[0];
8600 testPatternData.b = fwk_testPatternData[3];
8601 switch (gCamCapability[mCameraId]->color_arrangement) {
8602 case CAM_FILTER_ARRANGEMENT_RGGB:
8603 case CAM_FILTER_ARRANGEMENT_GRBG:
8604 testPatternData.gr = fwk_testPatternData[1];
8605 testPatternData.gb = fwk_testPatternData[2];
8606 break;
8607 case CAM_FILTER_ARRANGEMENT_GBRG:
8608 case CAM_FILTER_ARRANGEMENT_BGGR:
8609 testPatternData.gr = fwk_testPatternData[2];
8610 testPatternData.gb = fwk_testPatternData[1];
8611 break;
8612 default:
8613 ALOGE("%s: color arrangement %d is not supported", __func__,
8614 gCamCapability[mCameraId]->color_arrangement);
8615 break;
8616 }
8617 }
8618 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
8619 testPatternData)) {
8620 rc = BAD_VALUE;
8621 }
8622 } else {
8623 ALOGE("%s: Invalid framework sensor test pattern mode %d", __func__,
8624 fwk_testPatternMode);
8625 }
8626 }
8627
8628 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
8629 size_t count = 0;
8630 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
8631 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
8632 gps_coords.data.d, gps_coords.count, count);
8633 if (gps_coords.count != count) {
8634 rc = BAD_VALUE;
8635 }
8636 }
8637
8638 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
8639 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
8640 size_t count = 0;
8641 const char *gps_methods_src = (const char *)
8642 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
8643 memset(gps_methods, '\0', sizeof(gps_methods));
8644 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
8645 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
8646 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
8647 if (GPS_PROCESSING_METHOD_SIZE != count) {
8648 rc = BAD_VALUE;
8649 }
8650 }
8651
8652 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
8653 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
8654 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
8655 gps_timestamp)) {
8656 rc = BAD_VALUE;
8657 }
8658 }
8659
8660 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8661 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
8662 cam_rotation_info_t rotation_info;
8663 if (orientation == 0) {
8664 rotation_info.rotation = ROTATE_0;
8665 } else if (orientation == 90) {
8666 rotation_info.rotation = ROTATE_90;
8667 } else if (orientation == 180) {
8668 rotation_info.rotation = ROTATE_180;
8669 } else if (orientation == 270) {
8670 rotation_info.rotation = ROTATE_270;
8671 }
8672 rotation_info.streamId = snapshotStreamId;
8673 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
8674 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
8675 rc = BAD_VALUE;
8676 }
8677 }
8678
8679 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
8680 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
8681 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
8682 rc = BAD_VALUE;
8683 }
8684 }
8685
8686 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
8687 uint32_t thumb_quality = (uint32_t)
8688 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
8689 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
8690 thumb_quality)) {
8691 rc = BAD_VALUE;
8692 }
8693 }
8694
8695 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8696 cam_dimension_t dim;
8697 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8698 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8699 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
8700 rc = BAD_VALUE;
8701 }
8702 }
8703
8704 // Internal metadata
8705 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
8706 size_t count = 0;
8707 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
8708 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
8709 privatedata.data.i32, privatedata.count, count);
8710 if (privatedata.count != count) {
8711 rc = BAD_VALUE;
8712 }
8713 }
8714
8715 if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
8716 uint8_t* use_av_timer =
8717 frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
8718 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
8719 rc = BAD_VALUE;
8720 }
8721 }
8722
8723 // EV step
8724 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
8725 gCamCapability[mCameraId]->exp_compensation_step)) {
8726 rc = BAD_VALUE;
8727 }
8728
8729 // CDS info
8730 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
8731 cam_cds_data_t *cdsData = (cam_cds_data_t *)
8732 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
8733
8734 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8735 CAM_INTF_META_CDS_DATA, *cdsData)) {
8736 rc = BAD_VALUE;
8737 }
8738 }
8739
8740 // Hybrid AE
8741 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
8742 uint8_t *hybrid_ae = (uint8_t *)
8743 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
8744
8745 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8746 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
8747 rc = BAD_VALUE;
8748 }
8749 }
8750
8751 return rc;
8752 }
8753
8754 /*===========================================================================
8755 * FUNCTION : captureResultCb
8756 *
8757 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
8758 *
8759 * PARAMETERS :
8760 * @frame : frame information from mm-camera-interface
8761 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
8762 * @userdata: userdata
8763 *
8764 * RETURN : NONE
8765 *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer,void * userdata)8766 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
8767 camera3_stream_buffer_t *buffer,
8768 uint32_t frame_number, bool isInputBuffer, void *userdata)
8769 {
8770 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
8771 if (hw == NULL) {
8772 ALOGE("%s: Invalid hw %p", __func__, hw);
8773 return;
8774 }
8775
8776 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
8777 return;
8778 }
8779
8780
8781 /*===========================================================================
8782 * FUNCTION : initialize
8783 *
8784 * DESCRIPTION: Pass framework callback pointers to HAL
8785 *
8786 * PARAMETERS :
8787 *
8788 *
8789 * RETURN : Success : 0
8790 * Failure: -ENODEV
8791 *==========================================================================*/
8792
initialize(const struct camera3_device * device,const camera3_callback_ops_t * callback_ops)8793 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
8794 const camera3_callback_ops_t *callback_ops)
8795 {
8796 CDBG("%s: E", __func__);
8797 QCamera3HardwareInterface *hw =
8798 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8799 if (!hw) {
8800 ALOGE("%s: NULL camera device", __func__);
8801 return -ENODEV;
8802 }
8803
8804 int rc = hw->initialize(callback_ops);
8805 CDBG("%s: X", __func__);
8806 return rc;
8807 }
8808
8809 /*===========================================================================
8810 * FUNCTION : configure_streams
8811 *
8812 * DESCRIPTION:
8813 *
8814 * PARAMETERS :
8815 *
8816 *
8817 * RETURN : Success: 0
8818 * Failure: -EINVAL (if stream configuration is invalid)
8819 * -ENODEV (fatal error)
8820 *==========================================================================*/
8821
configure_streams(const struct camera3_device * device,camera3_stream_configuration_t * stream_list)8822 int QCamera3HardwareInterface::configure_streams(
8823 const struct camera3_device *device,
8824 camera3_stream_configuration_t *stream_list)
8825 {
8826 CDBG("%s: E", __func__);
8827 QCamera3HardwareInterface *hw =
8828 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8829 if (!hw) {
8830 ALOGE("%s: NULL camera device", __func__);
8831 return -ENODEV;
8832 }
8833 int rc = hw->configureStreams(stream_list);
8834 CDBG("%s: X", __func__);
8835 return rc;
8836 }
8837
8838 /*===========================================================================
8839 * FUNCTION : construct_default_request_settings
8840 *
8841 * DESCRIPTION: Configure a settings buffer to meet the required use case
8842 *
8843 * PARAMETERS :
8844 *
8845 *
8846 * RETURN : Success: Return valid metadata
8847 * Failure: Return NULL
8848 *==========================================================================*/
8849 const camera_metadata_t* QCamera3HardwareInterface::
construct_default_request_settings(const struct camera3_device * device,int type)8850 construct_default_request_settings(const struct camera3_device *device,
8851 int type)
8852 {
8853
8854 CDBG("%s: E", __func__);
8855 camera_metadata_t* fwk_metadata = NULL;
8856 QCamera3HardwareInterface *hw =
8857 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8858 if (!hw) {
8859 ALOGE("%s: NULL camera device", __func__);
8860 return NULL;
8861 }
8862
8863 fwk_metadata = hw->translateCapabilityToMetadata(type);
8864
8865 CDBG("%s: X", __func__);
8866 return fwk_metadata;
8867 }
8868
8869 /*===========================================================================
8870 * FUNCTION : process_capture_request
8871 *
8872 * DESCRIPTION:
8873 *
8874 * PARAMETERS :
8875 *
8876 *
8877 * RETURN :
8878 *==========================================================================*/
process_capture_request(const struct camera3_device * device,camera3_capture_request_t * request)8879 int QCamera3HardwareInterface::process_capture_request(
8880 const struct camera3_device *device,
8881 camera3_capture_request_t *request)
8882 {
8883 CDBG("%s: E", __func__);
8884 QCamera3HardwareInterface *hw =
8885 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8886 if (!hw) {
8887 ALOGE("%s: NULL camera device", __func__);
8888 return -EINVAL;
8889 }
8890
8891 int rc = hw->processCaptureRequest(request);
8892 CDBG("%s: X", __func__);
8893 return rc;
8894 }
8895
8896 /*===========================================================================
8897 * FUNCTION : dump
8898 *
8899 * DESCRIPTION:
8900 *
8901 * PARAMETERS :
8902 *
8903 *
8904 * RETURN :
8905 *==========================================================================*/
8906
dump(const struct camera3_device * device,int fd)8907 void QCamera3HardwareInterface::dump(
8908 const struct camera3_device *device, int fd)
8909 {
8910 /* Log level property is read when "adb shell dumpsys media.camera" is
8911 called so that the log level can be controlled without restarting
8912 the media server */
8913 getLogLevel();
8914
8915 CDBG("%s: E", __func__);
8916 QCamera3HardwareInterface *hw =
8917 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8918 if (!hw) {
8919 ALOGE("%s: NULL camera device", __func__);
8920 return;
8921 }
8922
8923 hw->dump(fd);
8924 CDBG("%s: X", __func__);
8925 return;
8926 }
8927
8928 /*===========================================================================
8929 * FUNCTION : flush
8930 *
8931 * DESCRIPTION:
8932 *
8933 * PARAMETERS :
8934 *
8935 *
8936 * RETURN :
8937 *==========================================================================*/
8938
flush(const struct camera3_device * device)8939 int QCamera3HardwareInterface::flush(
8940 const struct camera3_device *device)
8941 {
8942 int rc;
8943 CDBG("%s: E", __func__);
8944 QCamera3HardwareInterface *hw =
8945 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8946 if (!hw) {
8947 ALOGE("%s: NULL camera device", __func__);
8948 return -EINVAL;
8949 }
8950
8951 rc = hw->flush();
8952 CDBG("%s: X", __func__);
8953 return rc;
8954 }
8955
8956 /*===========================================================================
8957 * FUNCTION : close_camera_device
8958 *
8959 * DESCRIPTION:
8960 *
8961 * PARAMETERS :
8962 *
8963 *
8964 * RETURN :
8965 *==========================================================================*/
close_camera_device(struct hw_device_t * device)8966 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
8967 {
8968 CDBG("%s: E", __func__);
8969 int ret = NO_ERROR;
8970 QCamera3HardwareInterface *hw =
8971 reinterpret_cast<QCamera3HardwareInterface *>(
8972 reinterpret_cast<camera3_device_t *>(device)->priv);
8973 if (!hw) {
8974 ALOGE("NULL camera device");
8975 return BAD_VALUE;
8976 }
8977 delete hw;
8978
8979 CDBG("%s: X", __func__);
8980 return ret;
8981 }
8982
8983 /*===========================================================================
8984 * FUNCTION : getWaveletDenoiseProcessPlate
8985 *
8986 * DESCRIPTION: query wavelet denoise process plate
8987 *
8988 * PARAMETERS : None
8989 *
8990 * RETURN : WNR process plate value
8991 *==========================================================================*/
getWaveletDenoiseProcessPlate()8992 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
8993 {
8994 char prop[PROPERTY_VALUE_MAX];
8995 memset(prop, 0, sizeof(prop));
8996 property_get("persist.denoise.process.plates", prop, "0");
8997 int processPlate = atoi(prop);
8998 switch(processPlate) {
8999 case 0:
9000 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9001 case 1:
9002 return CAM_WAVELET_DENOISE_CBCR_ONLY;
9003 case 2:
9004 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9005 case 3:
9006 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9007 default:
9008 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9009 }
9010 }
9011
9012
9013 /*===========================================================================
9014 * FUNCTION : getTemporalDenoiseProcessPlate
9015 *
9016 * DESCRIPTION: query temporal denoise process plate
9017 *
9018 * PARAMETERS : None
9019 *
9020 * RETURN : TNR process plate value
9021 *==========================================================================*/
getTemporalDenoiseProcessPlate()9022 cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
9023 {
9024 char prop[PROPERTY_VALUE_MAX];
9025 memset(prop, 0, sizeof(prop));
9026 property_get("persist.tnr.process.plates", prop, "0");
9027 int processPlate = atoi(prop);
9028 switch(processPlate) {
9029 case 0:
9030 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9031 case 1:
9032 return CAM_WAVELET_DENOISE_CBCR_ONLY;
9033 case 2:
9034 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9035 case 3:
9036 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9037 default:
9038 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9039 }
9040 }
9041
9042
9043 /*===========================================================================
9044 * FUNCTION : extractSceneMode
9045 *
9046 * DESCRIPTION: Extract scene mode from frameworks set metadata
9047 *
9048 * PARAMETERS :
9049 * @frame_settings: CameraMetadata reference
9050 * @metaMode: ANDROID_CONTORL_MODE
9051 * @hal_metadata: hal metadata structure
9052 *
9053 * RETURN : None
9054 *==========================================================================*/
extractSceneMode(const CameraMetadata & frame_settings,uint8_t metaMode,metadata_buffer_t * hal_metadata)9055 int32_t QCamera3HardwareInterface::extractSceneMode(
9056 const CameraMetadata &frame_settings, uint8_t metaMode,
9057 metadata_buffer_t *hal_metadata)
9058 {
9059 int32_t rc = NO_ERROR;
9060
9061 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
9062 camera_metadata_ro_entry entry =
9063 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
9064 if (0 == entry.count)
9065 return rc;
9066
9067 uint8_t fwk_sceneMode = entry.data.u8[0];
9068
9069 int val = lookupHalName(SCENE_MODES_MAP,
9070 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
9071 fwk_sceneMode);
9072 if (NAME_NOT_FOUND != val) {
9073 uint8_t sceneMode = (uint8_t)val;
9074 CDBG("%s: sceneMode: %d", __func__, sceneMode);
9075 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9076 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9077 rc = BAD_VALUE;
9078 }
9079 }
9080 } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
9081 (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
9082 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
9083 CDBG("%s: sceneMode: %d", __func__, sceneMode);
9084 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9085 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9086 rc = BAD_VALUE;
9087 }
9088 }
9089 return rc;
9090 }
9091
9092 /*===========================================================================
9093 * FUNCTION : needRotationReprocess
9094 *
9095 * DESCRIPTION: if rotation needs to be done by reprocess in pp
9096 *
9097 * PARAMETERS : none
9098 *
9099 * RETURN : true: needed
9100 * false: no need
9101 *==========================================================================*/
needRotationReprocess()9102 bool QCamera3HardwareInterface::needRotationReprocess()
9103 {
9104 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
9105 // current rotation is not zero, and pp has the capability to process rotation
9106 CDBG_HIGH("%s: need do reprocess for rotation", __func__);
9107 return true;
9108 }
9109
9110 return false;
9111 }
9112
9113 /*===========================================================================
9114 * FUNCTION : needReprocess
9115 *
9116 * DESCRIPTION: if reprocess in needed
9117 *
9118 * PARAMETERS : none
9119 *
9120 * RETURN : true: needed
9121 * false: no need
9122 *==========================================================================*/
needReprocess(uint32_t postprocess_mask)9123 bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
9124 {
9125 if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
9126 // TODO: add for ZSL HDR later
9127 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
9128 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
9129 CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
9130 return true;
9131 } else {
9132 CDBG_HIGH("%s: already post processed frame", __func__);
9133 return false;
9134 }
9135 }
9136 return needRotationReprocess();
9137 }
9138
9139 /*===========================================================================
9140 * FUNCTION : needJpegRotation
9141 *
9142 * DESCRIPTION: if rotation from jpeg is needed
9143 *
9144 * PARAMETERS : none
9145 *
9146 * RETURN : true: needed
9147 * false: no need
9148 *==========================================================================*/
needJpegRotation()9149 bool QCamera3HardwareInterface::needJpegRotation()
9150 {
9151 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
9152 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
9153 CDBG("%s: Need Jpeg to do the rotation", __func__);
9154 return true;
9155 }
9156 return false;
9157 }
9158
9159 /*===========================================================================
9160 * FUNCTION : addOfflineReprocChannel
9161 *
9162 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
9163 * coming from input channel
9164 *
9165 * PARAMETERS :
9166 * @config : reprocess configuration
9167 * @inputChHandle : pointer to the input (source) channel
9168 *
9169 *
9170 * RETURN : Ptr to the newly created channel obj. NULL if failed.
9171 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    // Create the reprocess channel on the same camera/channel handles as the
    // rest of the session, linked to the given input (source) channel.
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
            CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    // NOTE(review): with default (throwing) operator new this NULL check is
    // dead code; presumably this HAL builds with -fno-exceptions — confirm.
    if (NULL == pChannel) {
        ALOGE("%s: no mem for reprocess channel", __func__);
        return NULL;
    }

    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config: start from an all-zero mask and enable only the
    // HAL3 postproc superset for the offline stream.
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;

    // Mirror the source channel's streams onto the reprocess channel so
    // output matches the input configuration.
    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    // Caller owns the returned channel.
    return pChannel;
}
9210
9211 /*===========================================================================
9212 * FUNCTION : getMobicatMask
9213 *
9214 * DESCRIPTION: returns mobicat mask
9215 *
9216 * PARAMETERS : none
9217 *
9218 * RETURN : mobicat mask
9219 *
9220 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Accessor for the mobicat enable mask cached by setMobicat().
    return m_MobicatMask;
}
9225
9226 /*===========================================================================
9227 * FUNCTION : setMobicat
9228 *
9229 * DESCRIPTION: set Mobicat on/off.
9230 *
9231 * PARAMETERS :
9232 * @params : none
9233 *
9234 * RETURN : int32_t type of status
9235 * NO_ERROR -- success
9236 * none-zero failure code
9237 *==========================================================================*/
setMobicat()9238 int32_t QCamera3HardwareInterface::setMobicat()
9239 {
9240 char value [PROPERTY_VALUE_MAX];
9241 property_get("persist.camera.mobicat", value, "0");
9242 int32_t ret = NO_ERROR;
9243 uint8_t enableMobi = (uint8_t)atoi(value);
9244
9245 if (enableMobi) {
9246 tune_cmd_t tune_cmd;
9247 tune_cmd.type = SET_RELOAD_CHROMATIX;
9248 tune_cmd.module = MODULE_ALL;
9249 tune_cmd.value = TRUE;
9250 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9251 CAM_INTF_PARM_SET_VFE_COMMAND,
9252 tune_cmd);
9253
9254 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9255 CAM_INTF_PARM_SET_PP_COMMAND,
9256 tune_cmd);
9257 }
9258 m_MobicatMask = enableMobi;
9259
9260 return ret;
9261 }
9262
9263 /*===========================================================================
9264 * FUNCTION : getLogLevel
9265 *
9266 * DESCRIPTION: Reads the log level property into a variable
9267 *
9268 * PARAMETERS :
9269 * None
9270 *
9271 * RETURN :
9272 * None
9273 *==========================================================================*/
getLogLevel()9274 void QCamera3HardwareInterface::getLogLevel()
9275 {
9276 char prop[PROPERTY_VALUE_MAX];
9277 uint32_t globalLogLevel = 0;
9278
9279 property_get("persist.camera.hal.debug", prop, "0");
9280 int val = atoi(prop);
9281 if (0 <= val) {
9282 gCamHal3LogLevel = (uint32_t)val;
9283 }
9284 property_get("persist.camera.global.debug", prop, "0");
9285 val = atoi(prop);
9286 if (0 <= val) {
9287 globalLogLevel = (uint32_t)val;
9288 }
9289
9290 /* Highest log level among hal.logs and global.logs is selected */
9291 if (gCamHal3LogLevel < globalLogLevel)
9292 gCamHal3LogLevel = globalLogLevel;
9293
9294 return;
9295 }
9296
9297 /*===========================================================================
9298 * FUNCTION : validateStreamRotations
9299 *
9300 * DESCRIPTION: Check if the rotations requested are supported
9301 *
9302 * PARAMETERS :
9303 * @stream_list : streams to be configured
9304 *
9305 * RETURN : NO_ERROR on success
9306 * -EINVAL on failure
9307 *
9308 *==========================================================================*/
validateStreamRotations(camera3_stream_configuration_t * streamList)9309 int QCamera3HardwareInterface::validateStreamRotations(
9310 camera3_stream_configuration_t *streamList)
9311 {
9312 int rc = NO_ERROR;
9313
9314 /*
9315 * Loop through all streams requested in configuration
9316 * Check if unsupported rotations have been requested on any of them
9317 */
9318 for (size_t j = 0; j < streamList->num_streams; j++){
9319 camera3_stream_t *newStream = streamList->streams[j];
9320
9321 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
9322 bool isImplDef = (newStream->format ==
9323 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
9324 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
9325 isImplDef);
9326
9327 if (isRotated && (!isImplDef || isZsl)) {
9328 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
9329 "type:%d and stream format:%d", __func__,
9330 newStream->rotation, newStream->stream_type,
9331 newStream->format);
9332 rc = -EINVAL;
9333 break;
9334 }
9335 }
9336 return rc;
9337 }
9338
9339 /*===========================================================================
9340 * FUNCTION : getFlashInfo
9341 *
9342 * DESCRIPTION: Retrieve information about whether the device has a flash.
9343 *
9344 * PARAMETERS :
9345 * @cameraId : Camera id to query
9346 * @hasFlash : Boolean indicating whether there is a flash device
9347 * associated with given camera
9348 * @flashNode : If a flash device exists, this will be its device node.
9349 *
9350 * RETURN :
9351 * None
9352 *==========================================================================*/
getFlashInfo(const int cameraId,bool & hasFlash,char (& flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])9353 void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
9354 bool& hasFlash,
9355 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
9356 {
9357 cam_capability_t* camCapability = gCamCapability[cameraId];
9358 if (NULL == camCapability) {
9359 hasFlash = false;
9360 flashNode[0] = '\0';
9361 } else {
9362 hasFlash = camCapability->flash_available;
9363 strlcpy(flashNode,
9364 (char*)camCapability->flash_dev_name,
9365 QCAMERA_MAX_FILEPATH_LENGTH);
9366 }
9367 }
9368
9369 /*===========================================================================
9370 * FUNCTION : getEepromVersionInfo
9371 *
9372 * DESCRIPTION: Retrieve version info of the sensor EEPROM data
9373 *
9374 * PARAMETERS : None
9375 *
9376 * RETURN : string describing EEPROM version
9377 * "\0" if no such info available
9378 *==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Returns a pointer into the capability table's EEPROM version buffer;
    // the caller must not free it.
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
9383
9384 /*===========================================================================
9385 * FUNCTION : getLdafCalib
9386 *
9387 * DESCRIPTION: Retrieve Laser AF calibration data
9388 *
9389 * PARAMETERS : None
9390 *
9391 * RETURN : Two uint32_t describing laser AF calibration data
9392 * NULL if none is available.
9393 *==========================================================================*/
getLdafCalib()9394 const uint32_t *QCamera3HardwareInterface::getLdafCalib()
9395 {
9396 if (mLdafCalibExist) {
9397 return &mLdafCalib[0];
9398 } else {
9399 return NULL;
9400 }
9401 }
9402
9403 /*===========================================================================
9404 * FUNCTION : dynamicUpdateMetaStreamInfo
9405 *
9406 * DESCRIPTION: This function:
9407 * (1) stops all the channels
9408 * (2) returns error on pending requests and buffers
9409 * (3) sends metastream_info in setparams
9410 * (4) starts all channels
9411 * This is useful when sensor has to be restarted to apply any
9412 * settings such as frame rate from a different sensor mode
9413 *
9414 * PARAMETERS : None
9415 *
9416 * RETURN : NO_ERROR on success
9417 * Error codes on failure
9418 *
9419 *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    CDBG("%s: E", __func__);

    // Step 1: stream-off everything so the sensor can be reconfigured.
    rc = stopAllChannels();
    if (rc < 0) {
        ALOGE("%s: stopAllChannels failed", __func__);
        return rc;
    }

    // Step 2: fail any in-flight requests/buffers back to the framework;
    // they cannot complete across the restart.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
        return rc;
    }

    /* Send meta stream info once again so that ISP can start */
    // Step 3: push the (possibly updated) stream config down to the backend.
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    CDBG("%s: set_parms META_STREAM_INFO with new settings ", __func__ );
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    // Intentionally not returned: a set_parms failure leaves the previous
    // sensor mode in effect but streaming is still restarted below.
    if (rc < 0) {
        ALOGE("%s: set Metastreaminfo failed. Sensor mode does not change",
                __func__);
    }

    // Step 4: stream-on all channels again in the new mode.
    rc = startAllChannels();
    if (rc < 0) {
        ALOGE("%s: startAllChannels failed", __func__);
        return rc;
    }

    CDBG("%s:%d X", __func__, __LINE__);
    return rc;
}
9459
9460 /*===========================================================================
9461 * FUNCTION : stopAllChannels
9462 *
9463 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
9464 *
9465 * PARAMETERS : None
9466 *
9467 * RETURN : NO_ERROR on success
9468 * Error codes on failure
9469 *
9470 *==========================================================================*/
stopAllChannels()9471 int32_t QCamera3HardwareInterface::stopAllChannels()
9472 {
9473 int32_t rc = NO_ERROR;
9474
9475 // Stop the Streams/Channels
9476 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9477 it != mStreamInfo.end(); it++) {
9478 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9479 channel->stop();
9480 (*it)->status = INVALID;
9481 }
9482
9483 if (mSupportChannel) {
9484 mSupportChannel->stop();
9485 }
9486 if (mAnalysisChannel) {
9487 mAnalysisChannel->stop();
9488 }
9489 if (mRawDumpChannel) {
9490 mRawDumpChannel->stop();
9491 }
9492 if (mMetadataChannel) {
9493 /* If content of mStreamInfo is not 0, there is metadata stream */
9494 mMetadataChannel->stop();
9495 }
9496
9497 CDBG("%s:%d All channels stopped", __func__, __LINE__);
9498 return rc;
9499 }
9500
9501 /*===========================================================================
9502 * FUNCTION : startAllChannels
9503 *
9504 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
9505 *
9506 * PARAMETERS : None
9507 *
9508 * RETURN : NO_ERROR on success
9509 * Error codes on failure
9510 *
9511 *==========================================================================*/
startAllChannels()9512 int32_t QCamera3HardwareInterface::startAllChannels()
9513 {
9514 int32_t rc = NO_ERROR;
9515
9516 CDBG("%s: Start all channels ", __func__);
9517 // Start the Streams/Channels
9518 if (mMetadataChannel) {
9519 /* If content of mStreamInfo is not 0, there is metadata stream */
9520 rc = mMetadataChannel->start();
9521 if (rc < 0) {
9522 ALOGE("%s: META channel start failed", __func__);
9523 return rc;
9524 }
9525 }
9526 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9527 it != mStreamInfo.end(); it++) {
9528 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9529 rc = channel->start();
9530 if (rc < 0) {
9531 ALOGE("%s: channel start failed", __func__);
9532 return rc;
9533 }
9534 }
9535 if (mAnalysisChannel) {
9536 mAnalysisChannel->start();
9537 }
9538 if (mSupportChannel) {
9539 rc = mSupportChannel->start();
9540 if (rc < 0) {
9541 ALOGE("%s: Support channel start failed", __func__);
9542 return rc;
9543 }
9544 }
9545 if (mRawDumpChannel) {
9546 rc = mRawDumpChannel->start();
9547 if (rc < 0) {
9548 ALOGE("%s: RAW dump channel start failed", __func__);
9549 return rc;
9550 }
9551 }
9552
9553 CDBG("%s:%d All channels started", __func__, __LINE__);
9554 return rc;
9555 }
9556
9557 /*===========================================================================
9558 * FUNCTION : notifyErrorForPendingRequests
9559 *
9560 * DESCRIPTION: This function sends error for all the pending requests/buffers
9561 *
9562 * PARAMETERS : None
9563 *
9564 * RETURN : Error codes
9565 * NO_ERROR on success
9566 *
9567 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;
    FlushMap flushMap;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Find the oldest frame number that still has a pending request. Buffers
    // older than this belong to requests whose metadata was already sent, so
    // they get ERROR_BUFFER; everything from frameNum onward gets
    // ERROR_REQUEST below.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    CDBG_HIGH("%s: Oldest frame num on mPendingRequestsList = %d",
            __func__, frameNum);

    // Go through the pending buffers and group them depending
    // on frame number
    for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end();) {

        if (k->frame_number < frameNum) {
            // Group buffers by frame number in flushMap, removing each from
            // the pending-buffer bookkeeping as it is claimed.
            ssize_t idx = flushMap.indexOfKey(k->frame_number);
            if (idx == NAME_NOT_FOUND) {
                Vector<PendingBufferInfo> pending;
                pending.add(*k);
                flushMap.add(k->frame_number, pending);
            } else {
                Vector<PendingBufferInfo> &pending =
                        flushMap.editValueFor(k->frame_number);
                pending.add(*k);
            }

            mPendingBuffersMap.num_buffers--;
            k = mPendingBuffersMap.mPendingBufferList.erase(k);
        } else {
            k++;
        }
    }

    // Phase 1: per-buffer ERROR_BUFFER notify + capture result for frames
    // whose metadata was already delivered.
    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
        uint32_t frame_number = flushMap.keyAt(iFlush);
        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);

        // Send Error notify to frameworks for each buffer for which
        // metadata buffer is already sent
        CDBG_HIGH("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
                __func__, frame_number, pending.size());

        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
        if (NULL == pStream_Buf) {
            ALOGE("%s: No memory for pending buffers array", __func__);
            return NO_MEMORY;
        }
        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());

        for (size_t j = 0; j < pending.size(); j++) {
            const PendingBufferInfo &info = pending.itemAt(j);
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = info.stream;
            notify_msg.message.error.frame_number = frame_number;
            pStream_Buf[j].acquire_fence = -1;
            pStream_Buf[j].release_fence = -1;
            pStream_Buf[j].buffer = info.buffer;
            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
            pStream_Buf[j].stream = info.stream;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            CDBG_HIGH("%s: notify frame_number = %d stream %p", __func__,
                    frame_number, info.stream);
        }

        // One capture result per frame carrying all its errored buffers;
        // result.result stays NULL because metadata was already sent.
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = (uint32_t)pending.size();
        result.output_buffers = pStream_Buf;
        mCallbackOps->process_capture_result(mCallbackOps, &result);

        delete [] pStream_Buf;
    }

    CDBG_HIGH("%s:Sending ERROR REQUEST for all pending requests", __func__);

    // Phase 2: regroup the remaining pending buffers (frame_number >=
    // frameNum) — these belong to requests that never produced metadata.
    flushMap.clear();
    for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end();) {
        ssize_t idx = flushMap.indexOfKey(k->frame_number);
        if (idx == NAME_NOT_FOUND) {
            Vector<PendingBufferInfo> pending;
            pending.add(*k);
            flushMap.add(k->frame_number, pending);
        } else {
            Vector<PendingBufferInfo> &pending =
                    flushMap.editValueFor(k->frame_number);
            pending.add(*k);
        }

        mPendingBuffersMap.num_buffers--;
        k = mPendingBuffersMap.mPendingBufferList.erase(k);
    }

    pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

    // Go through the pending requests info and send error request to framework
    // NOTE(review): this assumes flushMap and mPendingRequestsList advance in
    // lockstep (one request per flushed frame, same order); `i` is read and
    // erased once per flushMap entry without an end() check — confirm the
    // invariant holds for all callers.
    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
        uint32_t frame_number = flushMap.keyAt(iFlush);
        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
        CDBG_HIGH("%s:Sending ERROR REQUEST for frame %d",
                __func__, frame_number);

        // Send shutter notify to frameworks
        camera3_notify_msg_t notify_msg;
        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
        notify_msg.type = CAMERA3_MSG_ERROR;
        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
        notify_msg.message.error.error_stream = NULL;
        notify_msg.message.error.frame_number = frame_number;
        mCallbackOps->notify(mCallbackOps, &notify_msg);

        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
        if (NULL == pStream_Buf) {
            ALOGE("%s: No memory for pending buffers array", __func__);
            return NO_MEMORY;
        }
        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());

        for (size_t j = 0; j < pending.size(); j++) {
            const PendingBufferInfo &info = pending.itemAt(j);
            pStream_Buf[j].acquire_fence = -1;
            pStream_Buf[j].release_fence = -1;
            pStream_Buf[j].buffer = info.buffer;
            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
            pStream_Buf[j].stream = info.stream;
        }

        result.input_buffer = i->input_buffer;
        result.num_output_buffers = (uint32_t)pending.size();
        result.output_buffers = pStream_Buf;
        result.result = NULL;
        result.frame_number = frame_number;
        mCallbackOps->process_capture_result(mCallbackOps, &result);
        delete [] pStream_Buf;
        i = erasePendingRequest(i);
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    flushMap.clear();
    mPendingBuffersMap.num_buffers = 0;
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    CDBG_HIGH("%s: Cleared all the pending buffers ", __func__);

    return rc;
}
9735
isOnEncoder(const cam_dimension_t max_viewfinder_size,uint32_t width,uint32_t height)9736 bool QCamera3HardwareInterface::isOnEncoder(
9737 const cam_dimension_t max_viewfinder_size,
9738 uint32_t width, uint32_t height)
9739 {
9740 return (width > (uint32_t)max_viewfinder_size.width ||
9741 height > (uint32_t)max_viewfinder_size.height);
9742 }
9743
9744 /*===========================================================================
9745 * FUNCTION : setBundleInfo
9746 *
9747 * DESCRIPTION: Set bundle info for all streams that are bundle.
9748 *
9749 * PARAMETERS : None
9750 *
9751 * RETURN : NO_ERROR on success
9752 * Error codes on failure
9753 *==========================================================================*/
setBundleInfo()9754 int32_t QCamera3HardwareInterface::setBundleInfo()
9755 {
9756 int32_t rc = NO_ERROR;
9757
9758 if (mChannelHandle) {
9759 cam_bundle_config_t bundleInfo;
9760 memset(&bundleInfo, 0, sizeof(bundleInfo));
9761 rc = mCameraHandle->ops->get_bundle_info(
9762 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
9763 if (rc != NO_ERROR) {
9764 ALOGE("%s: get_bundle_info failed", __func__);
9765 return rc;
9766 }
9767 if (mAnalysisChannel) {
9768 mAnalysisChannel->setBundleInfo(bundleInfo);
9769 }
9770 if (mSupportChannel) {
9771 mSupportChannel->setBundleInfo(bundleInfo);
9772 }
9773 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9774 it != mStreamInfo.end(); it++) {
9775 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9776 channel->setBundleInfo(bundleInfo);
9777 }
9778 if (mRawDumpChannel) {
9779 mRawDumpChannel->setBundleInfo(bundleInfo);
9780 }
9781 }
9782
9783 return rc;
9784 }
9785
9786 }; //end namespace qcamera
9787