1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 * * Redistributions of source code must retain the above copyright
7 * notice, this list of conditions and the following disclaimer.
8 * * Redistributions in binary form must reproduce the above
9 * copyright notice, this list of conditions and the following
10 * disclaimer in the documentation and/or other materials provided
11 * with the distribution.
12 * * Neither the name of The Linux Foundation nor the names of its
13 * contributors may be used to endorse or promote products derived
14 * from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29
30 #define LOG_TAG "QCamera3HWI"
31 //#define LOG_NDEBUG 0
32
33 #define __STDC_LIMIT_MACROS
34
35 // To remove
36 #include <cutils/properties.h>
37
38 // System dependencies
39 #include <dlfcn.h>
40 #include <fcntl.h>
41 #include <stdio.h>
42 #include <stdlib.h>
43 #include <time.h>
44 #include <sync/sync.h>
45 #include "gralloc_priv.h"
46 #include <unordered_map>
47
48 // Display dependencies
49 #include "qdMetaData.h"
50
51 // Camera dependencies
52 #include "android/QCamera3External.h"
53 #include "util/QCameraFlash.h"
54 #include "QCamera3HWI.h"
55 #include "QCamera3VendorTags.h"
56 #include "QCameraTrace.h"
57
58 extern "C" {
59 #include "mm_camera_dbg.h"
60 }
61 #include "cam_cond.h"
62
63 using namespace android;
64
65 namespace qcamera {
66
/* Convenience accessor: fetch the INDEX-th mapped buffer pointer from a
 * heap/memory object (see DATA_PTR usage for m_pRelCamSyncHeap below). */
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline/result bookkeeping constants (units: frames unless noted).
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

// Maximum representable sample values for common sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// 4K UHD video dimensions.
#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream dimensions for which EIS is considered supported
// (see m_bEisSupportedSize in the constructor init list).
#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

// Per-configuration stream count limits.
#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
// Number of int32 entries describing one metering region
// (per the Android metering-region tuple convention: x0, y0, x1, y1, weight).
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3
// Element count of a statically-sized mapping table (array, not pointer).
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Superset of post-processing features advertised for HAL3 streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

// Per-camera capability table and cached static metadata, indexed by camera id.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Protects the globals shared across camera sessions (defined elsewhere).
extern pthread_mutex_t gCamLock;
// Runtime-adjustable HAL log verbosity; presumably refreshed by getLogLevel()
// (called in the constructor) — confirm against its definition.
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;
119
// Name-string to backend enum table for CDS mode selection
// (CDS: chroma down-sampling feature, cf. CAM_QCOM_FEATURE_CDS).
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
125
// Android CONTROL_EFFECT_MODE <-> backend cam_effect_mode_type mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,      CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,  CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,  CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,     CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,      CAM_EFFECT_MODE_AQUA }
};
139
// Android CONTROL_AWB_MODE <-> backend cam_wb_mode_type mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
153
// Android CONTROL_SCENE_MODE <-> backend cam_scene_mode_type mapping.
// Note: STEADYPHOTO intentionally maps to the backend ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
174
// Android CONTROL_AF_MODE <-> backend cam_focus_mode_type mapping.
// AF_MODE_OFF appears twice on purpose: both backend OFF and FIXED focus
// report as OFF to the framework (first match wins HAL->Android; see the
// ordering note above REFERENCE_ILLUMINANT_MAP).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
186
// Android COLOR_CORRECTION_ABERRATION_MODE <-> backend CAC mode mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};
197
// Android CONTROL_AE_ANTIBANDING_MODE <-> backend antibanding mode mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
206
// Android CONTROL_AE_MODE -> backend flash mode mapping. Note several AE
// modes intentionally share a flash mode (OFF/ON -> FLASH_OFF,
// AUTO_FLASH/AUTO_FLASH_REDEYE -> FLASH_AUTO).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
216
// Android FLASH_MODE <-> backend cam_flash_mode_t mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
224
// Android STATISTICS_FACE_DETECT_MODE <-> backend face detect mode mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF    },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL   }
};
232
// Android LENS_INFO_FOCUS_DISTANCE_CALIBRATION <-> backend calibration mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};
243
// Android LENS_STATE <-> backend AF lens state mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};
250
// Supported JPEG thumbnail sizes, flattened as (width, height) pairs.
// The leading (0, 0) entry denotes "no thumbnail", per the Android
// JPEG_AVAILABLE_THUMBNAIL_SIZES contract.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};
259
// Android SENSOR_TEST_PATTERN_MODE <-> backend test pattern mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};
270
/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
// Android SENSOR_REFERENCE_ILLUMINANT1 <-> backend AWB illuminant mapping.
// Several Android illuminants deliberately share a backend value (e.g. both
// DAYLIGHT and D50 -> CAM_AWB_D50); ordering resolves the reverse lookup.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};
296
// Requested frame rate (fps) -> backend high-frame-rate mode mapping.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
308
// camera3_device_ops vtable handed to the camera framework via
// mCameraDevice.ops (see the constructor). Entries left NULL
// (register_stream_buffers, get_metadata_vendor_tag_ops) are not
// implemented by this HAL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
320
// Four-int32 tuple used as a stream-configuration key; the meaning of the
// individual fields is fixed at the usage sites (not visible in this chunk).
typedef std::tuple<int32_t, int32_t, int32_t, int32_t> config_entry;

// Equality for config_entry: true only when all four fields match.
bool operator == (const config_entry & lhs, const config_entry & rhs) {
    if (std::get<0> (lhs) != std::get<0> (rhs)) {
        return false;
    }
    if (std::get<1> (lhs) != std::get<1> (rhs)) {
        return false;
    }
    if (std::get<2> (lhs) != std::get<2> (rhs)) {
        return false;
    }
    return std::get<3> (lhs) == std::get<3> (rhs);
}
329
330 struct ConfigEntryHash {
operator ()qcamera::ConfigEntryHash331 std::size_t operator() (config_entry const& entry) const {
332 size_t result = 1;
333 size_t hashValue = 31;
334 result = hashValue*result + std::hash<int> {} (std::get<0>(entry));
335 result = hashValue*result + std::hash<int> {} (std::get<1>(entry));
336 result = hashValue*result + std::hash<int> {} (std::get<2>(entry));
337 result = hashValue*result + std::hash<int> {} (std::get<3>(entry));
338 return result;
339 }
340 };
341
// initialise to some default value
// Per-camera backend session ids used for dual-camera linking; populated by
// openCamera() via get_session_id() and reset back to the 0xDEADBEEF
// "invalid" sentinel in closeCamera(). Guarded by gCamLock.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
344
345 /*===========================================================================
346 * FUNCTION : QCamera3HardwareInterface
347 *
348 * DESCRIPTION: constructor of QCamera3HardwareInterface
349 *
350 * PARAMETERS :
351 * @cameraId : camera ID
352 *
353 * RETURN : none
354 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      m_perfLock(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pRelCamSyncHeap(NULL),
      m_pRelCamSyncBuf(NULL),
      mAfTrigger()
{
    getLogLevel();
    m_perfLock.lock_init();
    mCommon.init(gCamCapability[cameraId]);
    // Fill in the hw_device_t header exposed to the framework; this HAL
    // advertises camera device API v3.5 and the shared static ops table.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_5;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // No default request settings built yet; created lazily per template.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // Temporal noise reduction toggles for preview and video streams.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);

    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    // Default to 64-byte stride alignment unless the GPU library (optional,
    // only when CHECK_GPU_PIXEL_ALIGNMENT is defined) reports a value.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    m60HzZone = is60HzZone();
}
478
479 /*===========================================================================
480 * FUNCTION : ~QCamera3HardwareInterface
481 *
482 * DESCRIPTION: destructor of QCamera3HardwareInterface
483 *
484 * PARAMETERS : none
485 *
486 * RETURN : none
487 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    // Phase 1: stop every channel first (streams must all be stopped
    // before any channel object is deleted below).
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Phase 2: delete channel objects and free the stream_info records.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // NOTE(review): mPictureChannel is only NULLed here, never deleted in
    // this destructor — presumably owned via mStreamInfo above; confirm.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            // Tell the backend there are no streams any more (zeroed
            // stream info) before tearing parameters down.
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drop any bookkeeping for requests that were still in flight.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
610
611 /*===========================================================================
612 * FUNCTION : erasePendingRequest
613 *
614 * DESCRIPTION: function to erase a desired pending request after freeing any
615 * allocated memory
616 *
617 * PARAMETERS :
618 * @i : iterator pointing to pending request to be erased
619 *
620 * RETURN : iterator pointing to the next request
621 *==========================================================================*/
622 QCamera3HardwareInterface::pendingRequestIterator
erasePendingRequest(pendingRequestIterator i)623 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
624 {
625 if (i->input_buffer != NULL) {
626 free(i->input_buffer);
627 i->input_buffer = NULL;
628 }
629 if (i->settings != NULL)
630 free_camera_metadata((camera_metadata_t*)i->settings);
631 return mPendingRequestsList.erase(i);
632 }
633
634 /*===========================================================================
635 * FUNCTION : camEvtHandle
636 *
637 * DESCRIPTION: Function registered to mm-camera-interface to handle events
638 *
639 * PARAMETERS :
640 * @camera_handle : interface layer camera handle
641 * @evt : ptr to event
642 * @user_data : user data ptr
643 *
644 * RETURN : none
645 *==========================================================================*/
camEvtHandle(uint32_t,mm_camera_event_t * evt,void * user_data)646 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
647 mm_camera_event_t *evt,
648 void *user_data)
649 {
650 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
651 if (obj && evt) {
652 switch(evt->server_event_type) {
653 case CAM_EVENT_TYPE_DAEMON_DIED:
654 pthread_mutex_lock(&obj->mMutex);
655 obj->mState = ERROR;
656 pthread_mutex_unlock(&obj->mMutex);
657 LOGE("Fatal, camera daemon died");
658 break;
659
660 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
661 LOGD("HAL got request pull from Daemon");
662 pthread_mutex_lock(&obj->mMutex);
663 obj->mWokenUpByDaemon = true;
664 obj->unblockRequestIfNecessary();
665 pthread_mutex_unlock(&obj->mMutex);
666 break;
667
668 default:
669 LOGW("Warning: Unhandled event %d",
670 evt->server_event_type);
671 break;
672 }
673 } else {
674 LOGE("NULL user_data/evt");
675 }
676 }
677
678 /*===========================================================================
679 * FUNCTION : openCamera
680 *
681 * DESCRIPTION: open camera
682 *
683 * PARAMETERS :
684 * @hw_device : double ptr for camera device struct
685 *
686 * RETURN : int32_t type of status
687 * NO_ERROR -- success
688 * none-zero failure code
689 *==========================================================================*/
openCamera(struct hw_device_t ** hw_device)690 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
691 {
692 int rc = 0;
693 if (mState != CLOSED) {
694 *hw_device = NULL;
695 return PERMISSION_DENIED;
696 }
697
698 m_perfLock.lock_acq();
699 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
700 mCameraId);
701
702 rc = openCamera();
703 if (rc == 0) {
704 *hw_device = &mCameraDevice.common;
705 } else
706 *hw_device = NULL;
707
708 m_perfLock.lock_rel();
709 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
710 mCameraId, rc);
711
712 if (rc == NO_ERROR) {
713 mState = OPENED;
714 }
715 return rc;
716 }
717
718 /*===========================================================================
719 * FUNCTION : openCamera
720 *
721 * DESCRIPTION: open camera
722 *
723 * PARAMETERS : none
724 *
725 * RETURN : int32_t type of status
726 * NO_ERROR -- success
727 * none-zero failure code
728 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CALL();
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    // Reserve the flash unit so the torch HAL cannot grab it concurrently.
    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    // Route backend (daemon) events to camEvtHandle with this object as cookie.
    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    // Scratch buffer for 3A debug EXIF data; freed in closeCamera().
    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
        &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get sessiion id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pRelCamSyncHeap = new QCamera3HeapMemory(1);
        rc = m_pRelCamSyncHeap->allocate(sizeof(cam_sync_related_sensors_event_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
                m_pRelCamSyncHeap->getFd(0),
                sizeof(cam_sync_related_sensors_event_info_t),
                m_pRelCamSyncHeap->getPtr(0));
        if(rc < 0) {
            // NOTE(review): 'rc = FAILED_TRANSACTION' below is a dead store —
            // the function returns NO_MEMORY regardless; confirm which error
            // code callers are meant to see.
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pRelCamSyncBuf =
                (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}
833
834 /*===========================================================================
835 * FUNCTION : closeCamera
836 *
837 * DESCRIPTION: close camera
838 *
839 * PARAMETERS : none
840 *
841 * RETURN : int32_t type of status
842 * NO_ERROR -- success
843 * none-zero failure code
844 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CALL();
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);
    // NOTE(review): mCameraHandle is dereferenced without a NULL check —
    // callers (destructor/close path) appear to guarantee an opened camera
    // via mState; confirm no path reaches here with a NULL handle.
    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Release the dual-camera sync heap allocated in openCamera().
    if (NULL != m_pRelCamSyncHeap) {
        m_pRelCamSyncHeap->deallocate();
        delete m_pRelCamSyncHeap;
        m_pRelCamSyncHeap = NULL;
        m_pRelCamSyncBuf = NULL;
    }

    // Free the 3A debug EXIF scratch buffer allocated in openCamera().
    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);
    return rc;
}
893
894 /*===========================================================================
895 * FUNCTION : initialize
896 *
897 * DESCRIPTION: Initialize frameworks callback functions
898 *
899 * PARAMETERS :
900 * @callback_ops : callback function to frameworks
901 *
902 * RETURN :
903 *
904 *==========================================================================*/
initialize(const struct camera3_callback_ops * callback_ops)905 int QCamera3HardwareInterface::initialize(
906 const struct camera3_callback_ops *callback_ops)
907 {
908 ATRACE_CALL();
909 int rc;
910
911 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
912 pthread_mutex_lock(&mMutex);
913
914 // Validate current state
915 switch (mState) {
916 case OPENED:
917 /* valid state */
918 break;
919 default:
920 LOGE("Invalid state %d", mState);
921 rc = -ENODEV;
922 goto err1;
923 }
924
925 rc = initParameters();
926 if (rc < 0) {
927 LOGE("initParamters failed %d", rc);
928 goto err1;
929 }
930 mCallbackOps = callback_ops;
931
932 mChannelHandle = mCameraHandle->ops->add_channel(
933 mCameraHandle->camera_handle, NULL, NULL, this);
934 if (mChannelHandle == 0) {
935 LOGE("add_channel failed");
936 rc = -ENOMEM;
937 pthread_mutex_unlock(&mMutex);
938 return rc;
939 }
940
941 pthread_mutex_unlock(&mMutex);
942 mCameraInitialized = true;
943 mState = INITIALIZED;
944 LOGI("X");
945 return 0;
946
947 err1:
948 pthread_mutex_unlock(&mMutex);
949 return rc;
950 }
951
952 /*===========================================================================
953 * FUNCTION : validateStreamDimensions
954 *
955 * DESCRIPTION: Check if the configuration requested are those advertised
956 *
957 * PARAMETERS :
958 * @cameraId : cameraId
959 * @stream_list : streams to be configured
960 *
961 * RETURN :
962 *
963 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(uint32_t cameraId,
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists*
     * At most one input (reprocess) stream may be present in a configuration.
     */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270-degree rotations compare the transposed dimensions
        // against the advertised size tables.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW streams must exactly match one of the advertised raw dims.
            count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[cameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[cameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[cameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL/input/bidirectional streams are additionally accepted at
            // full active array size (the break inside the if exits the
            // switch once the size matches).
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[cameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[cameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spc if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            // Otherwise fall back to matching the picture size table.
            count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[cameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[cameraId]->active_array_size.width,
                    gCamCapability[cameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1075
1076 /*===========================================================================
1077 * FUNCTION : validateUsageFlags
1078 *
1079 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1080 *
1081 * PARAMETERS :
1082 * @stream_list : streams to be configured
1083 *
1084 * RETURN :
1085 * NO_ERROR if the usage flags are supported
1086 * error code if usage flags are not supported
1087 *
1088 *==========================================================================*/
validateUsageFlags(const camera3_stream_configuration_t * streamList)1089 int QCamera3HardwareInterface::validateUsageFlags(
1090 const camera3_stream_configuration_t* streamList)
1091 {
1092 for (size_t j = 0; j < streamList->num_streams; j++) {
1093 const camera3_stream_t *newStream = streamList->streams[j];
1094
1095 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1096 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1097 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1098 continue;
1099 }
1100
1101 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1102 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1103 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1104 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1105 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height);
1106 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1107 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height);
1108 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1109 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height);
1110
1111 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1112 // So color spaces will always match.
1113
1114 // Check whether underlying formats of shared streams match.
1115 if (isVideo && isPreview && videoFormat != previewFormat) {
1116 LOGE("Combined video and preview usage flag is not supported");
1117 return -EINVAL;
1118 }
1119 if (isPreview && isZSL && previewFormat != zslFormat) {
1120 LOGE("Combined preview and zsl usage flag is not supported");
1121 return -EINVAL;
1122 }
1123 if (isVideo && isZSL && videoFormat != zslFormat) {
1124 LOGE("Combined video and zsl usage flag is not supported");
1125 return -EINVAL;
1126 }
1127 }
1128 return NO_ERROR;
1129 }
1130
1131 /*===========================================================================
1132 * FUNCTION : validateUsageFlagsForEis
1133 *
1134 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1135 *
1136 * PARAMETERS :
1137 * @bEisEnable : Flag indicated that EIS is enabled.
1138 * @bEisSupportedSize : Flag indicating that there is a preview/video stream
1139 * within the EIS supported size.
1140 * @stream_list : streams to be configured
1141 *
1142 * RETURN :
1143 * NO_ERROR if the usage flags are supported
1144 * error code if usage flags are not supported
1145 *
1146 *==========================================================================*/
validateUsageFlagsForEis(bool bEisEnable,bool bEisSupportedSize,const camera3_stream_configuration_t * streamList)1147 int QCamera3HardwareInterface::validateUsageFlagsForEis(bool bEisEnable, bool bEisSupportedSize,
1148 const camera3_stream_configuration_t* streamList)
1149 {
1150 for (size_t j = 0; j < streamList->num_streams; j++) {
1151 const camera3_stream_t *newStream = streamList->streams[j];
1152
1153 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1154 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1155
1156 // Because EIS is "hard-coded" for certain use case, and current
1157 // implementation doesn't support shared preview and video on the same
1158 // stream, return failure if EIS is forced on.
1159 if (isPreview && isVideo && bEisEnable && bEisSupportedSize) {
1160 LOGE("Combined video and preview usage flag is not supported due to EIS");
1161 return -EINVAL;
1162 }
1163 }
1164 return NO_ERROR;
1165 }
1166
1167
1168 /*==============================================================================
1169 * FUNCTION : isSupportChannelNeeded
1170 *
1171 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1172 *
1173 * PARAMETERS :
1174 * @stream_list : streams to be configured
1175 * @stream_config_info : the config info for streams to be configured
1176 *
1177 * RETURN : Boolen true/false decision
1178 *
1179 *==========================================================================*/
isSupportChannelNeeded(camera3_stream_configuration_t * streamList,cam_stream_size_info_t stream_config_info)1180 bool QCamera3HardwareInterface::isSupportChannelNeeded(
1181 camera3_stream_configuration_t *streamList,
1182 cam_stream_size_info_t stream_config_info)
1183 {
1184 uint32_t i;
1185 bool pprocRequested = false;
1186 /* Check for conditions where PProc pipeline does not have any streams*/
1187 for (i = 0; i < stream_config_info.num_streams; i++) {
1188 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1189 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1190 pprocRequested = true;
1191 break;
1192 }
1193 }
1194
1195 if (pprocRequested == false )
1196 return true;
1197
1198 /* Dummy stream needed if only raw or jpeg streams present */
1199 for (i = 0; i < streamList->num_streams; i++) {
1200 switch(streamList->streams[i]->format) {
1201 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1202 case HAL_PIXEL_FORMAT_RAW10:
1203 case HAL_PIXEL_FORMAT_RAW16:
1204 case HAL_PIXEL_FORMAT_BLOB:
1205 break;
1206 default:
1207 return false;
1208 }
1209 }
1210 return true;
1211 }
1212
1213 /*==============================================================================
1214 * FUNCTION : getSensorOutputSize
1215 *
1216 * DESCRIPTION: Get sensor output size based on current stream configuratoin
1217 *
1218 * PARAMETERS :
1219 * @sensor_dim : sensor output dimension (output)
1220 *
1221 * RETURN : int32_t type of status
1222 * NO_ERROR -- success
1223 * none-zero failure code
1224 *
1225 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
{
    int32_t rc = NO_ERROR;

    // Compute the bounding box of all configured stream sizes — the sensor
    // output must be at least this large in each dimension.
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    // The parameter buffer is reused for each set/get exchange, so it must
    // be cleared before every batch.
    clear_metadata_buffer(mParameters);

    // Push the required maximum dimension to the backend first ...
    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    // ... then query the raw dimension the sensor actually selected for it.
    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
        return rc;
    }

    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
    LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);

    return rc;
}
1268
1269 /*==============================================================================
1270 * FUNCTION : enablePowerHint
1271 *
1272 * DESCRIPTION: enable single powerhint for preview and different video modes.
1273 *
1274 * PARAMETERS :
1275 *
1276 * RETURN : NULL
1277 *
1278 *==========================================================================*/
enablePowerHint()1279 void QCamera3HardwareInterface::enablePowerHint()
1280 {
1281 if (!mPowerHintEnabled) {
1282 m_perfLock.powerHint(PowerHint::VIDEO_ENCODE, true);
1283 mPowerHintEnabled = true;
1284 }
1285 }
1286
1287 /*==============================================================================
1288 * FUNCTION : disablePowerHint
1289 *
1290 * DESCRIPTION: disable current powerhint.
1291 *
1292 * PARAMETERS :
1293 *
1294 * RETURN : NULL
1295 *
1296 *==========================================================================*/
disablePowerHint()1297 void QCamera3HardwareInterface::disablePowerHint()
1298 {
1299 if (mPowerHintEnabled) {
1300 m_perfLock.powerHint(PowerHint::VIDEO_ENCODE, false);
1301 mPowerHintEnabled = false;
1302 }
1303 }
1304
1305 /*==============================================================================
1306 * FUNCTION : addToPPFeatureMask
1307 *
1308 * DESCRIPTION: add additional features to pp feature mask based on
1309 * stream type and usecase
1310 *
1311 * PARAMETERS :
1312 * @stream_format : stream type for feature mask
1313 * @stream_idx : stream idx within postprocess_mask list to change
1314 *
1315 * RETURN : NULL
1316 *
1317 *==========================================================================*/
addToPPFeatureMask(int stream_format,uint32_t stream_idx)1318 void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1319 uint32_t stream_idx)
1320 {
1321 char feature_mask_value[PROPERTY_VALUE_MAX];
1322 cam_feature_mask_t feature_mask;
1323 int args_converted;
1324 int property_len;
1325
1326 /* Get feature mask from property */
1327 property_len = property_get("persist.camera.hal3.feature",
1328 feature_mask_value, "0");
1329 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1330 (feature_mask_value[1] == 'x')) {
1331 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1332 } else {
1333 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1334 }
1335 if (1 != args_converted) {
1336 feature_mask = 0;
1337 LOGE("Wrong feature mask %s", feature_mask_value);
1338 return;
1339 }
1340
1341 switch (stream_format) {
1342 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1343 /* Add LLVD to pp feature mask only if video hint is enabled */
1344 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1345 mStreamConfigInfo.postprocess_mask[stream_idx]
1346 |= CAM_QTI_FEATURE_SW_TNR;
1347 LOGH("Added SW TNR to pp feature mask");
1348 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1349 mStreamConfigInfo.postprocess_mask[stream_idx]
1350 |= CAM_QCOM_FEATURE_LLVD;
1351 LOGH("Added LLVD SeeMore to pp feature mask");
1352 }
1353 break;
1354 }
1355 default:
1356 break;
1357 }
1358 LOGD("PP feature mask %llx",
1359 mStreamConfigInfo.postprocess_mask[stream_idx]);
1360 }
1361
1362 /*==============================================================================
1363 * FUNCTION : updateFpsInPreviewBuffer
1364 *
1365 * DESCRIPTION: update FPS information in preview buffer.
1366 *
1367 * PARAMETERS :
1368 * @metadata : pointer to metadata buffer
1369 * @frame_number: frame_number to look for in pending buffer list
1370 *
1371 * RETURN : None
1372 *
1373 *==========================================================================*/
updateFpsInPreviewBuffer(metadata_buffer_t * metadata,uint32_t frame_number)1374 void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1375 uint32_t frame_number)
1376 {
1377 // Mark all pending buffers for this particular request
1378 // with corresponding framerate information
1379 for (List<PendingBuffersInRequest>::iterator req =
1380 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1381 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1382 for(List<PendingBufferInfo>::iterator j =
1383 req->mPendingBufferList.begin();
1384 j != req->mPendingBufferList.end(); j++) {
1385 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1386 if ((req->frame_number == frame_number) &&
1387 (channel->getStreamTypeMask() &
1388 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1389 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1390 CAM_INTF_PARM_FPS_RANGE, metadata) {
1391 int32_t cameraFps = float_range->max_fps;
1392 struct private_handle_t *priv_handle =
1393 (struct private_handle_t *)(*(j->buffer));
1394 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1395 }
1396 }
1397 }
1398 }
1399 }
1400
1401 /*==============================================================================
1402 * FUNCTION : updateTimeStampInPendingBuffers
1403 *
1404 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1405 * of a frame number
1406 *
1407 * PARAMETERS :
1408 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1409 * @timestamp : timestamp to be set
1410 *
1411 * RETURN : None
1412 *
1413 *==========================================================================*/
updateTimeStampInPendingBuffers(uint32_t frameNumber,nsecs_t timestamp)1414 void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1415 uint32_t frameNumber, nsecs_t timestamp)
1416 {
1417 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1418 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1419 if (req->frame_number != frameNumber)
1420 continue;
1421
1422 for (auto k = req->mPendingBufferList.begin();
1423 k != req->mPendingBufferList.end(); k++ ) {
1424 struct private_handle_t *priv_handle =
1425 (struct private_handle_t *) (*(k->buffer));
1426 setMetaData(priv_handle, SET_VT_TIMESTAMP, ×tamp);
1427 }
1428 }
1429 return;
1430 }
1431
1432 /*===========================================================================
1433 * FUNCTION : configureStreams
1434 *
1435 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1436 * and output streams.
1437 *
1438 * PARAMETERS :
1439 * @stream_list : streams to be configured
1440 *
1441 * RETURN :
1442 *
1443 *==========================================================================*/
configureStreams(camera3_stream_configuration_t * streamList)1444 int QCamera3HardwareInterface::configureStreams(
1445 camera3_stream_configuration_t *streamList)
1446 {
1447 ATRACE_CALL();
1448 int rc = 0;
1449
1450 // Acquire perfLock before configure streams
1451 m_perfLock.lock_acq();
1452 rc = configureStreamsPerfLocked(streamList);
1453 m_perfLock.lock_rel();
1454
1455 return rc;
1456 }
1457
1458 /*===========================================================================
1459 * FUNCTION : validateStreamCombination
1460 *
1461 * DESCRIPTION: Validate a given stream combination.
1462 *
1463 * PARAMETERS :
1464 * @cameraId : camera Id.
1465 * @stream_list : stream combination to be validated.
1466 * @status : validation status.
1467 *
1468 * RETURN : int32_t type of status
1469 * NO_ERROR -- success
1470 * none-zero failure code
1471 *==========================================================================*/
int32_t QCamera3HardwareInterface::validateStreamCombination(uint32_t cameraId,
        camera3_stream_configuration_t *streamList /*in*/, StreamValidateStatus *status /*out*/)
{
    // Running per-category stream counters, checked against HAL limits below.
    size_t rawStreamCnt = 0;
    size_t stallStreamCnt = 0;
    size_t processedStreamCnt = 0;
    size_t numYuv888OnEncoder = 0;
    bool bJpegExceeds4K = false;
    bool bJpegOnEncoder = false;
    uint32_t width_ratio;
    uint32_t height_ratio;
    bool isJpeg = false;
    cam_dimension_t jpegSize = {0, 0};
    camera3_stream_t *zslStream = nullptr;
    uint32_t maxEisWidth = 0;
    uint32_t maxEisHeight = 0;

    if (status == nullptr) {
        LOGE("NULL stream status");
        return BAD_VALUE;
    }

    // Sanity check stream_list
    if (streamList == NULL) {
        LOGE("NULL stream configuration");
        return BAD_VALUE;
    }
    if (streamList->streams == NULL) {
        LOGE("NULL stream list");
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        LOGE("Bad number of streams requested: %d",
                streamList->num_streams);
        return BAD_VALUE;
    }

    if (streamList->num_streams >= MAX_NUM_STREAMS) {
        LOGE("Maximum number of streams %d exceeded: %d",
                MAX_NUM_STREAMS, streamList->num_streams);
        return BAD_VALUE;
    }

    auto rc = validateUsageFlags(streamList);
    if (rc != NO_ERROR) {
        return rc;
    }

    rc = validateStreamDimensions(cameraId, streamList);
    if (rc == NO_ERROR) {
        rc = validateStreamRotations(streamList);
    }
    if (rc != NO_ERROR) {
        LOGE("Invalid stream configuration requested!");
        return rc;
    }

    // EIS 2.0 support is looked up in the capability's IS-type list.
    size_t count = IS_TYPE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
    for (size_t i = 0; i < count; i++) {
        if (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
            status->bEisSupported = true;
            break;
        }
    }

    if (status->bEisSupported) {
        maxEisWidth = MAX_EIS_WIDTH;
        maxEisHeight = MAX_EIS_HEIGHT;
    }

    status->maxViewfinderSize = gCamCapability[cameraId]->max_viewfinder_size;
    status->largeYuv888Size = {0, 0};

    /* stream configurations */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        LOGI("stream[%d] type = %d, format = %d, width = %d, "
                "height = %d, rotation = %d, usage = 0x%x",
                 i, newStream->stream_type, newStream->format,
                newStream->width, newStream->height, newStream->rotation,
                newStream->usage);
        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
            status->isZsl = true;
            status->inputStream = newStream;
        }

        // At most one ZSL (opaque reprocess) stream is allowed.
        if (IS_USAGE_ZSL(newStream->usage)) {
            if (zslStream != nullptr) {
                LOGE("Multiple input/reprocess streams requested!");
                return BAD_VALUE;
            }
            zslStream = newStream;
        }

        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            isJpeg = true;
            jpegSize.width = newStream->width;
            jpegSize.height = newStream->height;
            if (newStream->width > VIDEO_4K_WIDTH ||
                    newStream->height > VIDEO_4K_HEIGHT)
                bJpegExceeds4K = true;
        }

        // Track video dimensions and whether every preview/video stream
        // stays within the EIS-supported size envelope.
        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
                (IS_USAGE_PREVIEW(newStream->usage) || IS_USAGE_VIDEO(newStream->usage))) {
            if (IS_USAGE_VIDEO(newStream->usage)) {
                status->videoWidth = newStream->width;
                status->videoHeight = newStream->height;
                status->bIsVideo = true;
                if ((VIDEO_4K_WIDTH <= newStream->width) &&
                        (VIDEO_4K_HEIGHT <= newStream->height)) {
                    status->bIs4KVideo = true;
                }
            }
            status->bEisSupportedSize &= (newStream->width <= maxEisWidth) &&
                    (newStream->height <= maxEisHeight);
        }
        if (newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
            switch (newStream->format) {
            case HAL_PIXEL_FORMAT_BLOB:
                stallStreamCnt++;
                if (isOnEncoder(status->maxViewfinderSize, newStream->width,
                        newStream->height)) {
                    status->numStreamsOnEncoder++;
                    bJpegOnEncoder = true;
                }
                // A JPEG far smaller than the active array exceeds the CPP
                // downscale limit and needs special (small jpeg) handling.
                width_ratio = CEIL_DIVISION(gCamCapability[cameraId]->active_array_size.width,
                        newStream->width);
                height_ratio = CEIL_DIVISION(gCamCapability[cameraId]->active_array_size.height,
                        newStream->height);;
                FATAL_IF(gCamCapability[cameraId]->max_downscale_factor == 0,
                        "FATAL: max_downscale_factor cannot be zero and so assert");
                if ( (width_ratio > gCamCapability[cameraId]->max_downscale_factor) ||
                    (height_ratio > gCamCapability[cameraId]->max_downscale_factor)) {
                    LOGH("Setting small jpeg size flag to true");
                    status->bSmallJpegSize = true;
                }
                break;
            case HAL_PIXEL_FORMAT_RAW10:
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW16:
                rawStreamCnt++;
                break;
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                processedStreamCnt++;
                if (isOnEncoder(status->maxViewfinderSize, newStream->width,
                        newStream->height)) {
                    if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
                            !IS_USAGE_ZSL(newStream->usage)) {
                        status->commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                    }
                    status->numStreamsOnEncoder++;
                }
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                processedStreamCnt++;
                if (isOnEncoder(status->maxViewfinderSize, newStream->width,
                        newStream->height)) {
                    // If Yuv888 size is not greater than 4K, set feature mask
                    // to SUPERSET so that it support concurrent request on
                    // YUV and JPEG.
                    if (newStream->width <= VIDEO_4K_WIDTH &&
                            newStream->height <= VIDEO_4K_HEIGHT) {
                        status->commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                    }
                    status->numStreamsOnEncoder++;
                    numYuv888OnEncoder++;
                    status->largeYuv888Size.width = newStream->width;
                    status->largeYuv888Size.height = newStream->height;
                }
                break;
            default:
                LOGE("not a supported format 0x%x", newStream->format);
                return BAD_VALUE;
            }

        }
    }

    if (validateUsageFlagsForEis(status->bEisSupported, status->bEisSupportedSize, streamList) !=
            NO_ERROR) {
        return BAD_VALUE;
    }

    /* Check if num_streams is sane */
    if (stallStreamCnt > MAX_STALLING_STREAMS ||
            rawStreamCnt > MAX_RAW_STREAMS ||
            processedStreamCnt > MAX_PROCESSED_STREAMS) {
        LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
                 stallStreamCnt, rawStreamCnt, processedStreamCnt);
        return BAD_VALUE;
    }
    /* Check whether we have zsl stream or 4k video case */
    if (status->isZsl && status->bIs4KVideo) {
        LOGE("Currently invalid configuration ZSL&Video!");
        return BAD_VALUE;
    }
    /* Check if stream sizes are sane */
    if (status->numStreamsOnEncoder > 2) {
        LOGE("Number of streams on ISP encoder path exceeds limits of 2");
        return BAD_VALUE;
    } else if (1 < status->numStreamsOnEncoder){
        status->bUseCommonFeatureMask = true;
        LOGH("Multiple streams above max viewfinder size, common mask needed");
    }

    /* Check if BLOB size is greater than 4k in 4k recording case */
    if (status->bIs4KVideo && bJpegExceeds4K) {
        LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
        return BAD_VALUE;
    }

    // When JPEG and preview streams share VFE output, CPP will not apply CAC2
    // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
    // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
    // is not true. Otherwise testMandatoryOutputCombinations will fail with following
    // configurations:
    //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
    //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
    // (These two configurations will not have CAC2 enabled even in HQ modes.)
    if (!status->isZsl && bJpegOnEncoder && bJpegExceeds4K && status->bUseCommonFeatureMask) {
        ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
                __func__);
        return BAD_VALUE;
    }

    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
    // the YUV stream's size is greater or equal to the JPEG size, set common
    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
    if (numYuv888OnEncoder && isOnEncoder(status->maxViewfinderSize,
            jpegSize.width, jpegSize.height) &&
            status->largeYuv888Size.width > jpegSize.width &&
            status->largeYuv888Size.height > jpegSize.height) {
        status->bYuv888OverrideJpeg = true;
    } else if (!isJpeg && status->numStreamsOnEncoder > 1) {
        status->commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    }

    LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
            status->maxViewfinderSize.width, status->maxViewfinderSize.height, status->isZsl,
            status->bUseCommonFeatureMask, status->commonFeatureMask);
    LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
            status->numStreamsOnEncoder, processedStreamCnt, stallStreamCnt,
            status->bSmallJpegSize);

    return NO_ERROR;
}
1721
1722 /*===========================================================================
1723 * FUNCTION : configureStreamsPerfLocked
1724 *
1725 * DESCRIPTION: configureStreams while perfLock is held.
1726 *
1727 * PARAMETERS :
1728 * @stream_list : streams to be configured
1729 *
1730 * RETURN : int32_t type of status
1731 * NO_ERROR -- success
1732 * none-zero failure code
1733 *==========================================================================*/
configureStreamsPerfLocked(camera3_stream_configuration_t * streamList)1734 int QCamera3HardwareInterface::configureStreamsPerfLocked(
1735 camera3_stream_configuration_t *streamList)
1736 {
1737 ATRACE_CALL();
1738
1739 StreamValidateStatus streamStatus;
1740 auto rc = validateStreamCombination(mCameraId, streamList, &streamStatus);
1741 if (NO_ERROR != rc) {
1742 return rc;
1743 }
1744
1745 mOpMode = streamList->operation_mode;
1746 LOGD("mOpMode: %d", mOpMode);
1747
1748 /* first invalidate all the streams in the mStreamInfo list
1749 * if they appear again, they will be validated */
1750 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1751 it != mStreamInfo.end(); it++) {
1752 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1753 if (channel) {
1754 channel->stop();
1755 }
1756 (*it)->status = INVALID;
1757 }
1758
1759 if (mRawDumpChannel) {
1760 mRawDumpChannel->stop();
1761 delete mRawDumpChannel;
1762 mRawDumpChannel = NULL;
1763 }
1764
1765 if (mSupportChannel)
1766 mSupportChannel->stop();
1767
1768 if (mAnalysisChannel) {
1769 mAnalysisChannel->stop();
1770 }
1771 if (mMetadataChannel) {
1772 /* If mStreamInfo is not empty, a metadata stream exists */
1773 mMetadataChannel->stop();
1774 }
1775 if (mChannelHandle) {
1776 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1777 mChannelHandle);
1778 LOGD("stopping channel %d", mChannelHandle);
1779 }
1780
1781 pthread_mutex_lock(&mMutex);
1782
1783 // Check state
1784 switch (mState) {
1785 case INITIALIZED:
1786 case CONFIGURED:
1787 case STARTED:
1788 /* valid state */
1789 break;
1790 default:
1791 LOGE("Invalid state %d", mState);
1792 pthread_mutex_unlock(&mMutex);
1793 return -ENODEV;
1794 }
1795
1796 /* Check whether we have video stream */
1797 m_bIs4KVideo = streamStatus.bIs4KVideo;
1798 m_bIsVideo = streamStatus.bIsVideo;
1799 m_bEisSupportedSize = streamStatus.bEisSupportedSize;
1800 m_bTnrEnabled = false;
1801 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1802
1803 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1804
1805 /*OIS configuration*/
1806 bool oisSupported = false;
1807 size_t count = CAM_OPT_STAB_MAX;
1808 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1809 for (size_t i = 0; i < count; i++) {
1810 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1811 oisSupported = true;
1812 break;
1813 }
1814 }
1815
1816 /* EIS setprop control */
1817 char eis_prop[PROPERTY_VALUE_MAX];
1818 uint8_t eis_prop_set;
1819 memset(eis_prop, 0, sizeof(eis_prop));
1820 property_get("persist.camera.eis.enable", eis_prop, "0");
1821 eis_prop_set = (uint8_t)atoi(eis_prop);
1822
1823 m_bEisEnable = eis_prop_set && (!oisSupported && streamStatus.bEisSupported) &&
1824 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1825 (gCamCapability[mCameraId]->position != CAM_POSITION_FRONT);
1826
1827 /* Logic to enable/disable TNR based on specific config size/etc.*/
1828 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1829 ((streamStatus.videoWidth == 1920 && streamStatus.videoHeight == 1080) ||
1830 (streamStatus.videoWidth == 1280 && streamStatus.videoHeight == 720)) &&
1831 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1832 m_bTnrEnabled = true;
1833
1834 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1835 camera3_stream_t *jpegStream = NULL;
1836 for (size_t i = 0; i < streamList->num_streams; i++) {
1837 camera3_stream_t *newStream = streamList->streams[i];
1838 LOGH("newStream type = %d, stream format = %d "
1839 "stream size : %d x %d, stream rotation = %d",
1840 newStream->stream_type, newStream->format,
1841 newStream->width, newStream->height, newStream->rotation);
1842 //if the stream is in the mStreamList validate it
1843 bool stream_exists = false;
1844 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1845 it != mStreamInfo.end(); it++) {
1846 if ((*it)->stream == newStream) {
1847 QCamera3ProcessingChannel *channel =
1848 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1849 stream_exists = true;
1850 if (channel)
1851 delete channel;
1852 (*it)->status = VALID;
1853 (*it)->stream->priv = NULL;
1854 (*it)->channel = NULL;
1855 }
1856 }
1857 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1858 //new stream
1859 stream_info_t* stream_info;
1860 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1861 if (!stream_info) {
1862 LOGE("Could not allocate stream info");
1863 rc = -ENOMEM;
1864 pthread_mutex_unlock(&mMutex);
1865 return rc;
1866 }
1867 stream_info->stream = newStream;
1868 stream_info->status = VALID;
1869 stream_info->channel = NULL;
1870 mStreamInfo.push_back(stream_info);
1871 }
1872 /* Covers Opaque ZSL and API1 F/W ZSL */
1873 if (IS_USAGE_ZSL(newStream->usage)
1874 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1875 if (zslStream != NULL) {
1876 LOGE("Multiple input/reprocess streams requested!");
1877 pthread_mutex_unlock(&mMutex);
1878 return BAD_VALUE;
1879 }
1880 zslStream = newStream;
1881 }
1882 /* Covers YUV reprocess */
1883 if (streamStatus.inputStream != NULL) {
1884 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1885 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1886 && streamStatus.inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1887 && streamStatus.inputStream->width == newStream->width
1888 && streamStatus.inputStream->height == newStream->height) {
1889 if (zslStream != NULL) {
1890 /* This scenario indicates multiple YUV streams with same size
1891 * as input stream have been requested, since zsl stream handle
1892 * is solely used for the purpose of overriding the size of streams
1893 * which share h/w streams we will just make a guess here as to
1894 * which of the stream is a ZSL stream, this will be refactored
1895 * once we make generic logic for streams sharing encoder output
1896 */
1897 LOGH("Warning, Multiple ip/reprocess streams requested!");
1898 }
1899 zslStream = newStream;
1900 }
1901 }
1902 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1903 jpegStream = newStream;
1904 }
1905 }
1906
1907 /* If a zsl stream is set, we know that we have configured at least one input or
1908 bidirectional stream */
1909 if (NULL != zslStream) {
1910 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1911 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1912 mInputStreamInfo.format = zslStream->format;
1913 mInputStreamInfo.usage = zslStream->usage;
1914 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1915 mInputStreamInfo.dim.width,
1916 mInputStreamInfo.dim.height,
1917 mInputStreamInfo.format, mInputStreamInfo.usage);
1918 }
1919
1920 cleanAndSortStreamInfo();
1921 if (mMetadataChannel) {
1922 delete mMetadataChannel;
1923 mMetadataChannel = NULL;
1924 }
1925 if (mSupportChannel) {
1926 delete mSupportChannel;
1927 mSupportChannel = NULL;
1928 }
1929
1930 if (mAnalysisChannel) {
1931 delete mAnalysisChannel;
1932 mAnalysisChannel = NULL;
1933 }
1934
1935 if (mDummyBatchChannel) {
1936 delete mDummyBatchChannel;
1937 mDummyBatchChannel = NULL;
1938 }
1939
1940 //Create metadata channel and initialize it
1941 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1942 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1943 gCamCapability[mCameraId]->color_arrangement);
1944 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1945 mChannelHandle, mCameraHandle->ops, captureResultCb,
1946 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
1947 if (mMetadataChannel == NULL) {
1948 LOGE("failed to allocate metadata channel");
1949 rc = -ENOMEM;
1950 pthread_mutex_unlock(&mMutex);
1951 return rc;
1952 }
1953 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1954 if (rc < 0) {
1955 LOGE("metadata channel initialization failed");
1956 delete mMetadataChannel;
1957 mMetadataChannel = NULL;
1958 pthread_mutex_unlock(&mMutex);
1959 return rc;
1960 }
1961
1962 // Create analysis stream all the time, even when h/w support is not available
1963 {
1964 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1965 cam_analysis_info_t analysisInfo;
1966 rc = mCommon.getAnalysisInfo(
1967 FALSE,
1968 TRUE,
1969 analysisFeatureMask,
1970 &analysisInfo);
1971 if (rc != NO_ERROR) {
1972 LOGE("getAnalysisInfo failed, ret = %d", rc);
1973 pthread_mutex_unlock(&mMutex);
1974 return rc;
1975 }
1976
1977 cam_color_filter_arrangement_t analysis_color_arrangement =
1978 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
1979 CAM_FILTER_ARRANGEMENT_Y :
1980 gCamCapability[mCameraId]->color_arrangement);
1981 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
1982 analysis_color_arrangement);
1983
1984 mAnalysisChannel = new QCamera3SupportChannel(
1985 mCameraHandle->camera_handle,
1986 mChannelHandle,
1987 mCameraHandle->ops,
1988 &analysisInfo.analysis_padding_info,
1989 analysisFeatureMask,
1990 CAM_STREAM_TYPE_ANALYSIS,
1991 &analysisInfo.analysis_max_res,
1992 (analysisInfo.analysis_format
1993 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
1994 : CAM_FORMAT_YUV_420_NV21),
1995 analysisInfo.hw_analysis_supported,
1996 this,
1997 0); // force buffer count to 0
1998 if (!mAnalysisChannel) {
1999 LOGE("H/W Analysis channel cannot be created");
2000 pthread_mutex_unlock(&mMutex);
2001 return -ENOMEM;
2002 }
2003 }
2004
2005 bool isRawStreamRequested = false;
2006 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2007 /* Allocate channel objects for the requested streams */
2008 for (size_t i = 0; i < streamList->num_streams; i++) {
2009 camera3_stream_t *newStream = streamList->streams[i];
2010 uint32_t stream_usage = newStream->usage;
2011 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2012 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2013 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2014 || IS_USAGE_ZSL(newStream->usage)) &&
2015 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
2016 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2017 if (streamStatus.bUseCommonFeatureMask) {
2018 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2019 streamStatus.commonFeatureMask;
2020 } else {
2021 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2022 CAM_QCOM_FEATURE_NONE;
2023 }
2024
2025 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
2026 LOGH("Input stream configured, reprocess config");
2027 } else {
2028 //for non zsl streams find out the format
2029 switch (newStream->format) {
2030 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2031 {
2032 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2033 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2034 /* add additional features to pp feature mask */
2035 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2036 mStreamConfigInfo.num_streams);
2037
2038 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2039 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2040 CAM_STREAM_TYPE_VIDEO;
2041 if (m_bTnrEnabled && m_bTnrVideo) {
2042 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2043 CAM_QCOM_FEATURE_CPP_TNR;
2044 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2045 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2046 ~CAM_QCOM_FEATURE_CDS;
2047 }
2048 } else {
2049 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2050 CAM_STREAM_TYPE_PREVIEW;
2051 if (m_bTnrEnabled && m_bTnrPreview) {
2052 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2053 CAM_QCOM_FEATURE_CPP_TNR;
2054 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2055 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2056 ~CAM_QCOM_FEATURE_CDS;
2057 }
2058 padding_info.width_padding = mSurfaceStridePadding;
2059 padding_info.height_padding = CAM_PAD_TO_2;
2060 }
2061 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2062 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2063 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2064 newStream->height;
2065 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2066 newStream->width;
2067 }
2068 }
2069 break;
2070 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2071 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2072 if (isOnEncoder(streamStatus.maxViewfinderSize, newStream->width,
2073 newStream->height)) {
2074 if (streamStatus.bUseCommonFeatureMask)
2075 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2076 streamStatus.commonFeatureMask;
2077 else
2078 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2079 CAM_QCOM_FEATURE_NONE;
2080 } else {
2081 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2082 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2083 }
2084 break;
2085 case HAL_PIXEL_FORMAT_BLOB:
2086 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2087 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2088 if ((m_bIs4KVideo && !streamStatus.isZsl) ||
2089 (streamStatus.bSmallJpegSize && !streamStatus.isZsl)) {
2090 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2091 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2092 } else {
2093 if (streamStatus.bUseCommonFeatureMask &&
2094 isOnEncoder(streamStatus.maxViewfinderSize, newStream->width,
2095 newStream->height)) {
2096 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2097 streamStatus.commonFeatureMask;
2098 } else {
2099 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2100 }
2101 }
2102 if (streamStatus.isZsl) {
2103 if (zslStream) {
2104 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2105 (int32_t)zslStream->width;
2106 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2107 (int32_t)zslStream->height;
2108 } else {
2109 LOGE("Error, No ZSL stream identified");
2110 pthread_mutex_unlock(&mMutex);
2111 return -EINVAL;
2112 }
2113 } else if (m_bIs4KVideo) {
2114 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2115 (int32_t) streamStatus.videoWidth;
2116 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2117 (int32_t) streamStatus.videoHeight;
2118 } else if (streamStatus.bYuv888OverrideJpeg) {
2119 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2120 (int32_t) streamStatus.largeYuv888Size.width;
2121 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2122 (int32_t) streamStatus.largeYuv888Size.height;
2123 }
2124 break;
2125 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2126 case HAL_PIXEL_FORMAT_RAW16:
2127 case HAL_PIXEL_FORMAT_RAW10:
2128 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2129 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2130 isRawStreamRequested = true;
2131 break;
2132 default:
2133 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2134 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2135 break;
2136 }
2137 }
2138
2139 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2140 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2141 gCamCapability[mCameraId]->color_arrangement);
2142
2143 if (newStream->priv == NULL) {
2144 //New stream, construct channel
2145 switch (newStream->stream_type) {
2146 case CAMERA3_STREAM_INPUT:
2147 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2148 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2149 break;
2150 case CAMERA3_STREAM_BIDIRECTIONAL:
2151 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2152 GRALLOC_USAGE_HW_CAMERA_WRITE;
2153 break;
2154 case CAMERA3_STREAM_OUTPUT:
2155 /* For video encoding stream, set read/write rarely
2156 * flag so that they may be set to un-cached */
2157 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2158 newStream->usage |=
2159 (GRALLOC_USAGE_SW_READ_RARELY |
2160 GRALLOC_USAGE_SW_WRITE_RARELY |
2161 GRALLOC_USAGE_HW_CAMERA_WRITE);
2162 else if (IS_USAGE_ZSL(newStream->usage))
2163 {
2164 LOGD("ZSL usage flag skipping");
2165 }
2166 else if (newStream == zslStream
2167 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2168 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2169 } else
2170 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2171 break;
2172 default:
2173 LOGE("Invalid stream_type %d", newStream->stream_type);
2174 break;
2175 }
2176
2177 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2178 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2179 QCamera3ProcessingChannel *channel = NULL;
2180 switch (newStream->format) {
2181 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2182 if ((newStream->usage &
2183 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2184 (streamList->operation_mode ==
2185 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2186 ) {
2187 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2188 mChannelHandle, mCameraHandle->ops, captureResultCb,
2189 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2190 this,
2191 newStream,
2192 (cam_stream_type_t)
2193 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2194 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2195 mMetadataChannel,
2196 0); //heap buffers are not required for HFR video channel
2197 if (channel == NULL) {
2198 LOGE("allocation of channel failed");
2199 pthread_mutex_unlock(&mMutex);
2200 return -ENOMEM;
2201 }
2202 //channel->getNumBuffers() will return 0 here so use
2203 //MAX_INFLIGHT_HFR_REQUESTS
2204 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2205 newStream->priv = channel;
2206 LOGI("num video buffers in HFR mode: %d",
2207 MAX_INFLIGHT_HFR_REQUESTS);
2208 } else {
2209 /* Copy stream contents in HFR preview only case to create
2210 * dummy batch channel so that sensor streaming is in
2211 * HFR mode */
2212 if (!m_bIsVideo && (streamList->operation_mode ==
2213 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2214 mDummyBatchStream = *newStream;
2215 mDummyBatchStream.usage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
2216 }
2217 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2218 mChannelHandle, mCameraHandle->ops, captureResultCb,
2219 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2220 this,
2221 newStream,
2222 (cam_stream_type_t)
2223 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2224 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2225 mMetadataChannel,
2226 MAX_INFLIGHT_REQUESTS);
2227 if (channel == NULL) {
2228 LOGE("allocation of channel failed");
2229 pthread_mutex_unlock(&mMutex);
2230 return -ENOMEM;
2231 }
2232 newStream->max_buffers = MAX_INFLIGHT_60FPS_REQUESTS;
2233 newStream->priv = channel;
2234 }
2235 break;
2236 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2237 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2238 mChannelHandle,
2239 mCameraHandle->ops, captureResultCb,
2240 setBufferErrorStatus, &padding_info,
2241 this,
2242 newStream,
2243 (cam_stream_type_t)
2244 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2245 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2246 mMetadataChannel);
2247 if (channel == NULL) {
2248 LOGE("allocation of YUV channel failed");
2249 pthread_mutex_unlock(&mMutex);
2250 return -ENOMEM;
2251 }
2252 newStream->max_buffers = channel->getNumBuffers();
2253 newStream->priv = channel;
2254 break;
2255 }
2256 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2257 case HAL_PIXEL_FORMAT_RAW16:
2258 case HAL_PIXEL_FORMAT_RAW10:
2259 mRawChannel = new QCamera3RawChannel(
2260 mCameraHandle->camera_handle, mChannelHandle,
2261 mCameraHandle->ops, captureResultCb,
2262 setBufferErrorStatus, &padding_info,
2263 this, newStream,
2264 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2265 mMetadataChannel,
2266 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2267 if (mRawChannel == NULL) {
2268 LOGE("allocation of raw channel failed");
2269 pthread_mutex_unlock(&mMutex);
2270 return -ENOMEM;
2271 }
2272 newStream->max_buffers = mRawChannel->getNumBuffers();
2273 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2274 break;
2275 case HAL_PIXEL_FORMAT_BLOB:
2276 // Max live snapshot inflight buffer is 1. This is to mitigate
2277 // frame drop issues for video snapshot. The more buffers being
2278 // allocated, the more frame drops there are.
2279 mPictureChannel = new QCamera3PicChannel(
2280 mCameraHandle->camera_handle, mChannelHandle,
2281 mCameraHandle->ops, captureResultCb,
2282 setBufferErrorStatus, &padding_info, this, newStream,
2283 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2284 m_bIs4KVideo, streamStatus.isZsl, mMetadataChannel,
2285 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2286 if (mPictureChannel == NULL) {
2287 LOGE("allocation of channel failed");
2288 pthread_mutex_unlock(&mMutex);
2289 return -ENOMEM;
2290 }
2291 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2292 newStream->max_buffers = mPictureChannel->getNumBuffers();
2293 mPictureChannel->overrideYuvSize(
2294 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2295 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2296 break;
2297
2298 default:
2299 LOGE("not a supported format 0x%x", newStream->format);
2300 pthread_mutex_unlock(&mMutex);
2301 return -EINVAL;
2302 }
2303 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2304 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2305 } else {
2306 LOGE("Error, Unknown stream type");
2307 pthread_mutex_unlock(&mMutex);
2308 return -EINVAL;
2309 }
2310
2311 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2312 if (channel != NULL && channel->isUBWCEnabled()) {
2313 cam_format_t fmt = channel->getStreamDefaultFormat(
2314 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2315 newStream->width, newStream->height);
2316 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2317 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2318 }
2319 }
2320
2321 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2322 it != mStreamInfo.end(); it++) {
2323 if ((*it)->stream == newStream) {
2324 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2325 break;
2326 }
2327 }
2328 } else {
2329 // Channel already exists for this stream
2330 // Do nothing for now
2331 }
2332 padding_info = gCamCapability[mCameraId]->padding_info;
2333
2334 /* Do not add entries for input stream in metastream info
2335 * since there is no real stream associated with it
2336 */
2337 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2338 mStreamConfigInfo.num_streams++;
2339 }
2340
2341 //RAW DUMP channel
2342 if (mEnableRawDump && isRawStreamRequested == false){
2343 cam_dimension_t rawDumpSize;
2344 rawDumpSize = getMaxRawSize(mCameraId);
2345 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2346 setPAAFSupport(rawDumpFeatureMask,
2347 CAM_STREAM_TYPE_RAW,
2348 gCamCapability[mCameraId]->color_arrangement);
2349 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2350 mChannelHandle,
2351 mCameraHandle->ops,
2352 rawDumpSize,
2353 &padding_info,
2354 this, rawDumpFeatureMask);
2355 if (!mRawDumpChannel) {
2356 LOGE("Raw Dump channel cannot be created");
2357 pthread_mutex_unlock(&mMutex);
2358 return -ENOMEM;
2359 }
2360 }
2361
2362
2363 if (mAnalysisChannel) {
2364 cam_analysis_info_t analysisInfo;
2365 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2366 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2367 CAM_STREAM_TYPE_ANALYSIS;
2368 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2369 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2370 rc = mCommon.getAnalysisInfo(FALSE, TRUE,
2371 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2372 &analysisInfo);
2373 if (rc != NO_ERROR) {
2374 LOGE("getAnalysisInfo failed, ret = %d", rc);
2375 pthread_mutex_unlock(&mMutex);
2376 return rc;
2377 }
2378 cam_color_filter_arrangement_t analysis_color_arrangement =
2379 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2380 CAM_FILTER_ARRANGEMENT_Y :
2381 gCamCapability[mCameraId]->color_arrangement);
2382 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2383 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2384 analysis_color_arrangement);
2385
2386 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2387 analysisInfo.analysis_max_res;
2388 mStreamConfigInfo.num_streams++;
2389 }
2390
2391 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2392 cam_analysis_info_t supportInfo;
2393 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2394 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2395 setPAAFSupport(callbackFeatureMask,
2396 CAM_STREAM_TYPE_CALLBACK,
2397 gCamCapability[mCameraId]->color_arrangement);
2398 rc = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
2399 if (rc != NO_ERROR) {
2400 LOGE("getAnalysisInfo failed, ret = %d", rc);
2401 pthread_mutex_unlock(&mMutex);
2402 return rc;
2403 }
2404 mSupportChannel = new QCamera3SupportChannel(
2405 mCameraHandle->camera_handle,
2406 mChannelHandle,
2407 mCameraHandle->ops,
2408 &gCamCapability[mCameraId]->padding_info,
2409 callbackFeatureMask,
2410 CAM_STREAM_TYPE_CALLBACK,
2411 &QCamera3SupportChannel::kDim,
2412 CAM_FORMAT_YUV_420_NV21,
2413 supportInfo.hw_analysis_supported,
2414 this, 0);
2415 if (!mSupportChannel) {
2416 LOGE("dummy channel cannot be created");
2417 pthread_mutex_unlock(&mMutex);
2418 return -ENOMEM;
2419 }
2420 }
2421
2422 if (mSupportChannel) {
2423 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2424 QCamera3SupportChannel::kDim;
2425 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2426 CAM_STREAM_TYPE_CALLBACK;
2427 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2428 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2429 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2430 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2431 gCamCapability[mCameraId]->color_arrangement);
2432 mStreamConfigInfo.num_streams++;
2433 }
2434
2435 if (mRawDumpChannel) {
2436 cam_dimension_t rawSize;
2437 rawSize = getMaxRawSize(mCameraId);
2438 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2439 rawSize;
2440 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2441 CAM_STREAM_TYPE_RAW;
2442 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2443 CAM_QCOM_FEATURE_NONE;
2444 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2445 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2446 gCamCapability[mCameraId]->color_arrangement);
2447 mStreamConfigInfo.num_streams++;
2448 }
2449 /* In HFR mode, if video stream is not added, create a dummy channel so that
2450 * ISP can create a batch mode even for preview only case. This channel is
2451 * never 'start'ed (no stream-on), it is only 'initialized' */
2452 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2453 !m_bIsVideo) {
2454 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2455 setPAAFSupport(dummyFeatureMask,
2456 CAM_STREAM_TYPE_VIDEO,
2457 gCamCapability[mCameraId]->color_arrangement);
2458 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2459 mChannelHandle,
2460 mCameraHandle->ops, captureResultCb,
2461 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2462 this,
2463 &mDummyBatchStream,
2464 CAM_STREAM_TYPE_VIDEO,
2465 dummyFeatureMask,
2466 mMetadataChannel);
2467 if (NULL == mDummyBatchChannel) {
2468 LOGE("creation of mDummyBatchChannel failed."
2469 "Preview will use non-hfr sensor mode ");
2470 }
2471 }
2472 if (mDummyBatchChannel) {
2473 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2474 mDummyBatchStream.width;
2475 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2476 mDummyBatchStream.height;
2477 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2478 CAM_STREAM_TYPE_VIDEO;
2479 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2480 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2481 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2482 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2483 gCamCapability[mCameraId]->color_arrangement);
2484 mStreamConfigInfo.num_streams++;
2485 }
2486
2487 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2488 mStreamConfigInfo.buffer_info.max_buffers =
2489 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2490
2491 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2492 for (pendingRequestIterator i = mPendingRequestsList.begin();
2493 i != mPendingRequestsList.end();) {
2494 i = erasePendingRequest(i);
2495 }
2496 mPendingFrameDropList.clear();
2497 // Initialize/Reset the pending buffers list
2498 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2499 req.mPendingBufferList.clear();
2500 }
2501 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2502
2503 mPendingReprocessResultList.clear();
2504
2505 mCurJpegMeta.clear();
2506 //Get min frame duration for this streams configuration
2507 deriveMinFrameDuration();
2508
2509 // Update state
2510 mState = CONFIGURED;
2511
2512 memset(&mLastEISCropInfo, 0, sizeof(mLastEISCropInfo));
2513
2514 if (streamList->session_parameters != nullptr) {
2515 CameraMetadata meta;
2516 meta = streamList->session_parameters;
2517
2518 // send an unconfigure to the backend so that the isp
2519 // resources are deallocated
2520 if (!mFirstConfiguration) {
2521 cam_stream_size_info_t stream_config_info;
2522 int32_t hal_version = CAM_HAL_V3;
2523 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
2524 stream_config_info.buffer_info.min_buffers =
2525 MIN_INFLIGHT_REQUESTS;
2526 stream_config_info.buffer_info.max_buffers =
2527 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2528 clear_metadata_buffer(mParameters);
2529 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2530 CAM_INTF_PARM_HAL_VERSION, hal_version);
2531 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2532 CAM_INTF_META_STREAM_INFO, stream_config_info);
2533 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
2534 mParameters);
2535 if (rc < 0) {
2536 LOGE("set_parms for unconfigure failed");
2537 pthread_mutex_unlock(&mMutex);
2538 return rc;
2539 }
2540 }
2541 /* get eis information for stream configuration */
2542 cam_is_type_t is_type;
2543 char is_type_value[PROPERTY_VALUE_MAX];
2544 property_get("persist.camera.is_type", is_type_value, "0");
2545 is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
2546
2547 int32_t hal_version = CAM_HAL_V3;
2548 clear_metadata_buffer(mParameters);
2549 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
2550 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, mCaptureIntent);
2551
2552 uint8_t fwkVideoStabMode=0;
2553 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
2554 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
2555 }
2556 //If EIS is enabled, turn it on for video
2557 bool setEis = m_bEisEnable && (m_bIsVideo || fwkVideoStabMode) && m_bEisSupportedSize &&
2558 !meta.exists(QCAMERA3_USE_AV_TIMER);
2559 int32_t vsMode;
2560 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
2561 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
2562 rc = BAD_VALUE;
2563 }
2564
2565 //IS type will be 0 unless EIS is supported. If EIS is supported
2566 //it could either be 1 or 4 depending on the stream and video size
2567 if (setEis) {
2568 if (!m_bEisSupportedSize) {
2569 is_type = IS_TYPE_DIS;
2570 } else {
2571 is_type = IS_TYPE_EIS_2_0;
2572 }
2573 mStreamConfigInfo.is_type = is_type;
2574 } else {
2575 mStreamConfigInfo.is_type = IS_TYPE_NONE;
2576 }
2577
2578 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2579 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
2580 int32_t tintless_value = 1;
2581 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2582 CAM_INTF_PARM_TINTLESS, tintless_value);
2583 //Disable CDS for HFR mode or if DIS/EIS is on.
2584 //CDS is a session parameter in the backend/ISP, so need to be set/reset
2585 //after every configure_stream
2586 if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
2587 (m_bIsVideo)) {
2588 int32_t cds = CAM_CDS_MODE_OFF;
2589 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2590 CAM_INTF_PARM_CDS_MODE, cds))
2591 LOGE("Failed to disable CDS for HFR mode");
2592
2593 }
2594
2595 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
2596 uint8_t* use_av_timer = NULL;
2597
2598 if (m_debug_avtimer){
2599 use_av_timer = &m_debug_avtimer;
2600 }
2601 else{
2602 use_av_timer =
2603 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
2604 }
2605
2606 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
2607 rc = BAD_VALUE;
2608 }
2609 }
2610
2611 setMobicat();
2612
2613 /* Set fps and hfr mode while sending meta stream info so that sensor
2614 * can configure appropriate streaming mode */
2615 mHFRVideoFps = DEFAULT_VIDEO_FPS;
2616 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
2617 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
2618 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2619 rc = setHalFpsRange(meta, mParameters);
2620 if (rc == NO_ERROR) {
2621 int32_t max_fps =
2622 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
2623 if (mBatchSize) {
2624 /* For HFR, more buffers are dequeued upfront to improve the performance */
2625 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
2626 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
2627 } else if (max_fps == 60) {
2628 /* for 60 fps usecas increase inflight requests */
2629 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
2630 mMaxInFlightRequests = MAX_INFLIGHT_60FPS_REQUESTS;
2631 } else if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
2632 /* for non 60 fps video use cases, set min = max inflight requests to
2633 avoid frame drops due to degraded system performance */
2634 mMinInFlightRequests = MAX_INFLIGHT_REQUESTS;
2635 }
2636 }
2637 else {
2638 LOGE("setHalFpsRange failed");
2639 }
2640 }
2641 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
2642
2643
2644 //TODO: validate the arguments, HSV scenemode should have only the
2645 //advertised fps ranges
2646
2647 /*set the capture intent, hal version, tintless, stream info,
2648 *and disenable parameters to the backend*/
2649 LOGD("set_parms META_STREAM_INFO " );
2650 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
2651 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
2652 "Format:%d",
2653 mStreamConfigInfo.type[i],
2654 mStreamConfigInfo.stream_sizes[i].width,
2655 mStreamConfigInfo.stream_sizes[i].height,
2656 mStreamConfigInfo.postprocess_mask[i],
2657 mStreamConfigInfo.format[i]);
2658 }
2659
2660 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
2661 mParameters);
2662 if (rc < 0) {
2663 LOGE("set_parms failed for hal version, stream info");
2664 }
2665
2666 cam_dimension_t sensor_dim;
2667 memset(&sensor_dim, 0, sizeof(sensor_dim));
2668 rc = getSensorOutputSize(sensor_dim);
2669 if (rc != NO_ERROR) {
2670 LOGE("Failed to get sensor output size");
2671 pthread_mutex_unlock(&mMutex);
2672 goto error_exit;
2673 }
2674
2675 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
2676 gCamCapability[mCameraId]->active_array_size.height,
2677 sensor_dim.width, sensor_dim.height);
2678
2679 /* Set batchmode before initializing channel. Since registerBuffer
2680 * internally initializes some of the channels, better set batchmode
2681 * even before first register buffer */
2682 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2683 it != mStreamInfo.end(); it++) {
2684 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2685 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
2686 && mBatchSize) {
2687 rc = channel->setBatchSize(mBatchSize);
2688 //Disable per frame map unmap for HFR/batchmode case
2689 rc |= channel->setPerFrameMapUnmap(false);
2690 if (NO_ERROR != rc) {
2691 LOGE("Channel init failed %d", rc);
2692 pthread_mutex_unlock(&mMutex);
2693 goto error_exit;
2694 }
2695 }
2696 }
2697
2698 //First initialize all streams
2699 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2700 it != mStreamInfo.end(); it++) {
2701 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2702 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
2703 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
2704 setEis)
2705 rc = channel->initialize(is_type);
2706 else {
2707 rc = channel->initialize(IS_TYPE_NONE);
2708 }
2709 if (NO_ERROR != rc) {
2710 LOGE("Channel initialization failed %d", rc);
2711 pthread_mutex_unlock(&mMutex);
2712 goto error_exit;
2713 }
2714 }
2715
2716 if (mRawDumpChannel) {
2717 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
2718 if (rc != NO_ERROR) {
2719 LOGE("Error: Raw Dump Channel init failed");
2720 pthread_mutex_unlock(&mMutex);
2721 goto error_exit;
2722 }
2723 }
2724 if (mSupportChannel) {
2725 rc = mSupportChannel->initialize(IS_TYPE_NONE);
2726 if (rc < 0) {
2727 LOGE("Support channel initialization failed");
2728 pthread_mutex_unlock(&mMutex);
2729 goto error_exit;
2730 }
2731 }
2732 if (mAnalysisChannel) {
2733 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
2734 if (rc < 0) {
2735 LOGE("Analysis channel initialization failed");
2736 pthread_mutex_unlock(&mMutex);
2737 goto error_exit;
2738 }
2739 }
2740 if (mDummyBatchChannel) {
2741 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
2742 if (rc < 0) {
2743 LOGE("mDummyBatchChannel setBatchSize failed");
2744 pthread_mutex_unlock(&mMutex);
2745 goto error_exit;
2746 }
2747 rc = mDummyBatchChannel->initialize(is_type);
2748 if (rc < 0) {
2749 LOGE("mDummyBatchChannel initialization failed");
2750 pthread_mutex_unlock(&mMutex);
2751 goto error_exit;
2752 }
2753 }
2754
2755 // Set bundle info
2756 rc = setBundleInfo();
2757 if (rc < 0) {
2758 LOGE("setBundleInfo failed %d", rc);
2759 pthread_mutex_unlock(&mMutex);
2760 goto error_exit;
2761 }
2762
2763 }
2764
2765 pthread_mutex_unlock(&mMutex);
2766
2767 error_exit:
2768
2769 return rc;
2770 }
2771
2772 /*===========================================================================
2773 * FUNCTION : validateCaptureRequest
2774 *
2775 * DESCRIPTION: validate a capture request from camera service
2776 *
2777 * PARAMETERS :
2778 * @request : request from framework to process
2779 *
2780 * RETURN :
2781 *
2782 *==========================================================================*/
validateCaptureRequest(camera3_capture_request_t * request)2783 int QCamera3HardwareInterface::validateCaptureRequest(
2784 camera3_capture_request_t *request)
2785 {
2786 ssize_t idx = 0;
2787 const camera3_stream_buffer_t *b;
2788 CameraMetadata meta;
2789
2790 /* Sanity check the request */
2791 if (request == NULL) {
2792 LOGE("NULL capture request");
2793 return BAD_VALUE;
2794 }
2795
2796 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2797 /*settings cannot be null for the first request*/
2798 return BAD_VALUE;
2799 }
2800
2801 uint32_t frameNumber = request->frame_number;
2802 if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2803 LOGE("Request %d: No output buffers provided!",
2804 __FUNCTION__, frameNumber);
2805 return BAD_VALUE;
2806 }
2807 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2808 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2809 request->num_output_buffers, MAX_NUM_STREAMS);
2810 return BAD_VALUE;
2811 }
2812 if (request->input_buffer != NULL) {
2813 b = request->input_buffer;
2814 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2815 LOGE("Request %d: Buffer %ld: Status not OK!",
2816 frameNumber, (long)idx);
2817 return BAD_VALUE;
2818 }
2819 if (b->release_fence != -1) {
2820 LOGE("Request %d: Buffer %ld: Has a release fence!",
2821 frameNumber, (long)idx);
2822 return BAD_VALUE;
2823 }
2824 if (b->buffer == NULL) {
2825 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2826 frameNumber, (long)idx);
2827 return BAD_VALUE;
2828 }
2829 }
2830
2831 // Validate all buffers
2832 b = request->output_buffers;
2833 do {
2834 QCamera3ProcessingChannel *channel =
2835 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2836 if (channel == NULL) {
2837 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2838 frameNumber, (long)idx);
2839 return BAD_VALUE;
2840 }
2841 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2842 LOGE("Request %d: Buffer %ld: Status not OK!",
2843 frameNumber, (long)idx);
2844 return BAD_VALUE;
2845 }
2846 if (b->release_fence != -1) {
2847 LOGE("Request %d: Buffer %ld: Has a release fence!",
2848 frameNumber, (long)idx);
2849 return BAD_VALUE;
2850 }
2851 if (b->buffer == NULL) {
2852 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2853 frameNumber, (long)idx);
2854 return BAD_VALUE;
2855 }
2856 if (*(b->buffer) == NULL) {
2857 LOGE("Request %d: Buffer %ld: NULL private handle!",
2858 frameNumber, (long)idx);
2859 return BAD_VALUE;
2860 }
2861 idx++;
2862 b = request->output_buffers + idx;
2863 } while (idx < (ssize_t)request->num_output_buffers);
2864
2865 return NO_ERROR;
2866 }
2867
2868 /*===========================================================================
2869 * FUNCTION : deriveMinFrameDuration
2870 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2872 * on currently configured streams.
2873 *
2874 * PARAMETERS : NONE
2875 *
2876 * RETURN : NONE
2877 *
2878 *==========================================================================*/
deriveMinFrameDuration()2879 void QCamera3HardwareInterface::deriveMinFrameDuration()
2880 {
2881 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2882
2883 maxJpegDim = 0;
2884 maxProcessedDim = 0;
2885 maxRawDim = 0;
2886
2887 // Figure out maximum jpeg, processed, and raw dimensions
2888 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2889 it != mStreamInfo.end(); it++) {
2890
2891 // Input stream doesn't have valid stream_type
2892 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2893 continue;
2894
2895 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2896 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2897 if (dimension > maxJpegDim)
2898 maxJpegDim = dimension;
2899 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2900 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2901 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2902 if (dimension > maxRawDim)
2903 maxRawDim = dimension;
2904 } else {
2905 if (dimension > maxProcessedDim)
2906 maxProcessedDim = dimension;
2907 }
2908 }
2909
2910 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2911 MAX_SIZES_CNT);
2912
2913 //Assume all jpeg dimensions are in processed dimensions.
2914 if (maxJpegDim > maxProcessedDim)
2915 maxProcessedDim = maxJpegDim;
2916 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2917 if (maxProcessedDim > maxRawDim) {
2918 maxRawDim = INT32_MAX;
2919
2920 for (size_t i = 0; i < count; i++) {
2921 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2922 gCamCapability[mCameraId]->raw_dim[i].height;
2923 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2924 maxRawDim = dimension;
2925 }
2926 }
2927
2928 //Find minimum durations for processed, jpeg, and raw
2929 for (size_t i = 0; i < count; i++) {
2930 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2931 gCamCapability[mCameraId]->raw_dim[i].height) {
2932 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2933 break;
2934 }
2935 }
2936 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2937 for (size_t i = 0; i < count; i++) {
2938 if (maxProcessedDim ==
2939 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2940 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2941 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2942 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2943 break;
2944 }
2945 }
2946 }
2947
2948 /*===========================================================================
2949 * FUNCTION : getMinFrameDuration
2950 *
2951 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
2952 * and current request configuration.
2953 *
2954 * PARAMETERS : @request: requset sent by the frameworks
2955 *
2956 * RETURN : min farme duration for a particular request
2957 *
2958 *==========================================================================*/
getMinFrameDuration(const camera3_capture_request_t * request)2959 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2960 {
2961 bool hasJpegStream = false;
2962 bool hasRawStream = false;
2963 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2964 const camera3_stream_t *stream = request->output_buffers[i].stream;
2965 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2966 hasJpegStream = true;
2967 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2968 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2969 stream->format == HAL_PIXEL_FORMAT_RAW16)
2970 hasRawStream = true;
2971 }
2972
2973 if (!hasJpegStream)
2974 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2975 else
2976 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2977 }
2978
2979 /*===========================================================================
2980 * FUNCTION : handleBuffersDuringFlushLock
2981 *
2982 * DESCRIPTION: Account for buffers returned from back-end during flush
2983 * This function is executed while mMutex is held by the caller.
2984 *
2985 * PARAMETERS :
2986 * @buffer: image buffer for the callback
2987 *
2988 * RETURN :
2989 *==========================================================================*/
handleBuffersDuringFlushLock(camera3_stream_buffer_t * buffer)2990 void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2991 {
2992 bool buffer_found = false;
2993 for (List<PendingBuffersInRequest>::iterator req =
2994 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2995 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2996 for (List<PendingBufferInfo>::iterator i =
2997 req->mPendingBufferList.begin();
2998 i != req->mPendingBufferList.end(); i++) {
2999 if (i->buffer == buffer->buffer) {
3000 mPendingBuffersMap.numPendingBufsAtFlush--;
3001 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3002 buffer->buffer, req->frame_number,
3003 mPendingBuffersMap.numPendingBufsAtFlush);
3004 buffer_found = true;
3005 break;
3006 }
3007 }
3008 if (buffer_found) {
3009 break;
3010 }
3011 }
3012 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3013 //signal the flush()
3014 LOGD("All buffers returned to HAL. Continue flush");
3015 pthread_cond_signal(&mBuffersCond);
3016 }
3017 }
3018
3019
3020 /*===========================================================================
3021 * FUNCTION : handlePendingReprocResults
3022 *
3023 * DESCRIPTION: check and notify on any pending reprocess results
3024 *
3025 * PARAMETERS :
3026 * @frame_number : Pending request frame number
3027 *
3028 * RETURN : int32_t type of status
3029 * NO_ERROR -- success
3030 * none-zero failure code
3031 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a deferred reprocess result matching this frame number and,
    // if found, deliver its notify + capture result to the framework now.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Send the shutter/notify message that was held back until the
            // reprocess output became available. Notify must precede the
            // capture result for this frame.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Find the matching pending request so its input buffer and
            // settings can be attached to the final capture result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers = &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    // PARTIAL_RESULT_COUNT marks this as the final result.
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    erasePendingRequest(k);
                    break;
                }
            }
            // Erase the consumed entry; we break out immediately so the
            // invalidated iterator j is never dereferenced again.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
3070
3071 /*===========================================================================
3072 * FUNCTION : handleBatchMetadata
3073 *
3074 * DESCRIPTION: Handles metadata buffer callback in batch mode
3075 *
3076 * PARAMETERS : @metadata_buf: metadata buffer
3077 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3078 * the meta buf in this method
3079 *
3080 * RETURN :
3081 *
3082 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp
     * of the last frame in the batch. Eg: a batch containing buffers from
     * request 5,6,7 and 8 will have frame number and timestamp corresponding
     * to 8. This one buffer fans out into one result per batched request:
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
     * multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Pull the raw frame-number/timestamp fields out of the metadata buffer;
    // any NULL pointer means the buffer is unusable for inference.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        // Still run the loop below (loopCount == 1) so pipeline depth
        // bookkeeping in handleMetadataWithLock happens.
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the batch's last urgent frame number back to the first one
        // recorded when the batch was submitted.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGH("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        // Same mapping for the regular frame number; this path also retires
        // the batch entry from the pending map.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        mPendingBatchMap.removeItem(last_frame_number);

        LOGH("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // One iteration per batched request; the larger of the two diffs
        // covers the case where the counts differ.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                    urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                    frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                             urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                // Infer per-frame timestamps by spacing them evenly at the
                // HFR video frame rate, anchored on the batch's last timestamp.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGH("batch capture_time: %lld, capture_time: %lld",
                         last_frame_capture_time, capture_time);
            }
        }
        pthread_mutex_lock(&mMutex);
        // NOTE(review): when a diff is 0, (diff - 1) wraps as size_t so the
        // "last in batch" flag is never true for that kind — appears intended.
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
                (i == frameNumDiff-1) /* last metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    // Buffer is returned exactly once here (the per-iteration calls above
    // always pass false for free_and_bufdone_meta_buf).
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
3248
notifyError(uint32_t frameNumber,camera3_error_msg_code_t errorCode)3249 void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3250 camera3_error_msg_code_t errorCode)
3251 {
3252 camera3_notify_msg_t notify_msg;
3253 memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t));
3254 notify_msg.type = CAMERA3_MSG_ERROR;
3255 notify_msg.message.error.error_code = errorCode;
3256 notify_msg.message.error.error_stream = NULL;
3257 notify_msg.message.error.frame_number = frameNumber;
3258 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
3259
3260 return;
3261 }
3262
3263 /*===========================================================================
3264 * FUNCTION : handleMetadataWithLock
3265 *
3266 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3267 *
3268 * PARAMETERS : @metadata_buf: metadata buffer
3269 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3270 * the meta buf in this method
3271 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3272 * last urgent metadata in a batch. Always true for non-batch mode
3273 * @lastMetadataInBatch: Boolean to indicate whether this is the
3274 * last metadata in a batch. Always true for non-batch mode
3275 *
3276 * RETURN :
3277 *
3278 *==========================================================================*/
handleMetadataWithLock(mm_camera_super_buf_t * metadata_buf,bool free_and_bufdone_meta_buf,bool lastUrgentMetadataInBatch,bool lastMetadataInBatch)3279 void QCamera3HardwareInterface::handleMetadataWithLock(
3280 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
3281 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch)
3282 {
3283 ATRACE_CALL();
3284 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3285 //during flush do not send metadata from this thread
3286 LOGD("not sending metadata during flush or when mState is error");
3287 if (free_and_bufdone_meta_buf) {
3288 mMetadataChannel->bufDone(metadata_buf);
3289 free(metadata_buf);
3290 }
3291 return;
3292 }
3293
3294 //not in flush
3295 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3296 int32_t frame_number_valid, urgent_frame_number_valid;
3297 uint32_t frame_number, urgent_frame_number;
3298 int64_t capture_time, capture_time_av;
3299 nsecs_t currentSysTime;
3300
3301 int32_t *p_frame_number_valid =
3302 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3303 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3304 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3305 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
3306 int32_t *p_urgent_frame_number_valid =
3307 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3308 uint32_t *p_urgent_frame_number =
3309 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3310 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3311 metadata) {
3312 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3313 *p_frame_number_valid, *p_frame_number);
3314 }
3315
3316 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3317 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3318 LOGE("Invalid metadata");
3319 if (free_and_bufdone_meta_buf) {
3320 mMetadataChannel->bufDone(metadata_buf);
3321 free(metadata_buf);
3322 }
3323 goto done_metadata;
3324 }
3325 frame_number_valid = *p_frame_number_valid;
3326 frame_number = *p_frame_number;
3327 capture_time = *p_capture_time;
3328 capture_time_av = *p_capture_time_av;
3329 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3330 urgent_frame_number = *p_urgent_frame_number;
3331 currentSysTime = systemTime(CLOCK_MONOTONIC);
3332
3333 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3334 const int tries = 3;
3335 nsecs_t bestGap, measured;
3336 for (int i = 0; i < tries; ++i) {
3337 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3338 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3339 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3340 const nsecs_t gap = tmono2 - tmono;
3341 if (i == 0 || gap < bestGap) {
3342 bestGap = gap;
3343 measured = tbase - ((tmono + tmono2) >> 1);
3344 }
3345 }
3346 capture_time -= measured;
3347 }
3348
3349 // Detect if buffers from any requests are overdue
3350 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3351 if ( (currentSysTime - req.timestamp) >
3352 s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
3353 for (auto &missed : req.mPendingBufferList) {
3354 assert(missed.stream->priv);
3355 if (missed.stream->priv) {
3356 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3357 assert(ch->mStreams[0]);
3358 if (ch->mStreams[0]) {
3359 LOGW("Missing: frame = %d, buffer = %p,"
3360 "stream type = %d, stream format = %d",
3361 req.frame_number, missed.buffer,
3362 ch->mStreams[0]->getMyType(), missed.stream->format);
3363 ch->timeoutFrame(req.frame_number);
3364 }
3365 }
3366 }
3367 }
3368 }
3369 //Partial result on process_capture_result for timestamp
3370 if (urgent_frame_number_valid) {
3371 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3372 urgent_frame_number, capture_time);
3373
3374 //Recieved an urgent Frame Number, handle it
3375 //using partial results
3376 for (pendingRequestIterator i =
3377 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3378 LOGD("Iterator Frame = %d urgent frame = %d",
3379 i->frame_number, urgent_frame_number);
3380
3381 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3382 (i->partial_result_cnt == 0)) {
3383 LOGE("Error: HAL missed urgent metadata for frame number %d",
3384 i->frame_number);
3385 i->partialResultDropped = true;
3386 i->partial_result_cnt++;
3387 }
3388
3389 if (i->frame_number == urgent_frame_number &&
3390 i->bUrgentReceived == 0) {
3391
3392 camera3_capture_result_t result;
3393 memset(&result, 0, sizeof(camera3_capture_result_t));
3394
3395 i->partial_result_cnt++;
3396 i->bUrgentReceived = 1;
3397 // Extract 3A metadata
3398 result.result = translateCbUrgentMetadataToResultMetadata(
3399 metadata, lastUrgentMetadataInBatch, urgent_frame_number);
3400 // Populate metadata result
3401 result.frame_number = urgent_frame_number;
3402 result.num_output_buffers = 0;
3403 result.output_buffers = NULL;
3404 result.partial_result = i->partial_result_cnt;
3405
3406 mCallbackOps->process_capture_result(mCallbackOps, &result);
3407 LOGD("urgent frame_number = %u, capture_time = %lld",
3408 result.frame_number, capture_time);
3409 free_camera_metadata((camera_metadata_t *)result.result);
3410 break;
3411 }
3412 }
3413 }
3414
3415 if (!frame_number_valid) {
3416 LOGD("Not a valid normal frame number, used as SOF only");
3417 if (free_and_bufdone_meta_buf) {
3418 mMetadataChannel->bufDone(metadata_buf);
3419 free(metadata_buf);
3420 }
3421 goto done_metadata;
3422 }
3423 LOGH("valid frame_number = %u, capture_time = %lld",
3424 frame_number, capture_time);
3425
3426 for (pendingRequestIterator i = mPendingRequestsList.begin();
3427 i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
3428 // Flush out all entries with less or equal frame numbers.
3429
3430 camera3_capture_result_t result;
3431 memset(&result, 0, sizeof(camera3_capture_result_t));
3432
3433 LOGD("frame_number in the list is %u", i->frame_number);
3434 i->partial_result_cnt++;
3435 result.partial_result = i->partial_result_cnt;
3436
3437 // Check whether any stream buffer corresponding to this is dropped or not
3438 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3439 // The API does not expect a blob buffer to be dropped
3440 if (p_cam_frame_drop) {
3441 /* Clear notify_msg structure */
3442 camera3_notify_msg_t notify_msg;
3443 memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t));
3444 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3445 j != i->buffers.end(); j++) {
3446 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
3447 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3448 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
3449 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
3450 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3451 LOGE("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
3452 __func__, i->frame_number, streamID, j->stream->format);
3453 notify_msg.type = CAMERA3_MSG_ERROR;
3454 notify_msg.message.error.frame_number = i->frame_number;
3455 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
3456 notify_msg.message.error.error_stream = j->stream;
3457 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
3458 LOGE("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
3459 __func__, i->frame_number, streamID, j->stream->format);
3460 PendingFrameDropInfo PendingFrameDrop;
3461 PendingFrameDrop.frame_number=i->frame_number;
3462 PendingFrameDrop.stream_ID = streamID;
3463 // Add the Frame drop info to mPendingFrameDropList
3464 mPendingFrameDropList.push_back(PendingFrameDrop);
3465 }
3466 }
3467 }
3468 }
3469
3470 // Send empty metadata with already filled buffers for dropped metadata
3471 // and send valid metadata with already filled buffers for current metadata
3472 /* we could hit this case when we either
3473 * 1. have a pending reprocess request or
3474 * 2. miss a metadata buffer callback */
3475 bool errorResult = false;
3476 if (i->frame_number < frame_number) {
3477 if (i->input_buffer) {
3478 /* this will be handled in handleInputBufferWithLock */
3479 i++;
3480 continue;
3481 } else {
3482 mPendingLiveRequest--;
3483 errorResult = true;
3484 }
3485 } else {
3486 mPendingLiveRequest--;
3487 /* Clear notify_msg structure */
3488 camera3_notify_msg_t notify_msg;
3489 memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t));
3490
3491 // Send shutter notify to frameworks
3492 notify_msg.type = CAMERA3_MSG_SHUTTER;
3493 notify_msg.message.shutter.frame_number = i->frame_number;
3494 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3495 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
3496
3497 errorResult = i->partialResultDropped;
3498
3499 i->timestamp = capture_time;
3500
3501 /* Set the timestamp in display metadata so that clients aware of
3502 private_handle such as VT can use this un-modified timestamps.
3503 Camera framework is unaware of this timestamp and cannot change this */
3504 updateTimeStampInPendingBuffers(i->frame_number, capture_time_av);
3505
3506 // Find channel requiring metadata, meaning internal offline postprocess
3507 // is needed.
3508 //TODO: for now, we don't support two streams requiring metadata at the same time.
3509 // (because we are not making copies, and metadata buffer is not reference counted.
3510 bool internalPproc = false;
3511 for (pendingBufferIterator iter = i->buffers.begin();
3512 iter != i->buffers.end(); iter++) {
3513 if (iter->need_metadata) {
3514 internalPproc = true;
3515 QCamera3ProcessingChannel *channel =
3516 (QCamera3ProcessingChannel *)iter->stream->priv;
3517
3518 if (iter->need_crop) {
3519 QCamera3Stream *stream = channel->getStreamByIndex(0);
3520
3521 // Map the EIS crop to respective stream crop and append it.
3522 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA,
3523 metadata) {
3524 for (int j = 0; j < crop_data->num_of_streams; j++) {
3525 if ((stream != nullptr) &&
3526 (stream->getMyServerID() ==
3527 crop_data->crop_info[j].stream_id)) {
3528
3529 cam_dimension_t streamDim;
3530 if (stream->getFrameDimension(streamDim) != NO_ERROR) {
3531 LOGE("%s: Failed obtaining stream dimensions!", __func__);
3532 continue;
3533 }
3534
3535 mStreamCropMapper.update(
3536 gCamCapability[mCameraId]->active_array_size.width,
3537 gCamCapability[mCameraId]->active_array_size.height,
3538 streamDim.width, streamDim.height);
3539
3540 cam_eis_crop_info_t eisCrop = iter->crop_info;
3541 mStreamCropMapper.toSensor(eisCrop.delta_x, eisCrop.delta_y,
3542 eisCrop.delta_width, eisCrop.delta_height);
3543
3544 int32_t crop[4] = {
3545 crop_data->crop_info[j].crop.left + eisCrop.delta_x,
3546 crop_data->crop_info[j].crop.top + eisCrop.delta_y,
3547 crop_data->crop_info[j].crop.width - eisCrop.delta_width,
3548 crop_data->crop_info[j].crop.height - eisCrop.delta_height
3549 };
3550
3551 if (isCropValid(crop[0], crop[1], crop[2], crop[3],
3552 streamDim.width, streamDim.height)) {
3553 crop_data->crop_info[j].crop.left = crop[0];
3554 crop_data->crop_info[j].crop.top = crop[1];
3555 crop_data->crop_info[j].crop.width = crop[2];
3556 crop_data->crop_info[j].crop.height = crop[3];
3557 } else {
3558 LOGE("Invalid EIS compensated crop region");
3559 }
3560
3561 break;
3562 }
3563 }
3564 }
3565 }
3566
3567 channel->queueReprocMetadata(metadata_buf);
3568 break;
3569 }
3570 }
3571
3572 // atrace_begin(ATRACE_TAG_ALWAYS, "translateFromHalMetadata");
3573 result.result = translateFromHalMetadata(metadata,
3574 *i, internalPproc, lastMetadataInBatch);
3575 // atrace_end(ATRACE_TAG_ALWAYS);
3576
3577 saveExifParams(metadata);
3578
3579 if (i->blob_request) {
3580 {
3581 //Dump tuning metadata if enabled and available
3582 char prop[PROPERTY_VALUE_MAX];
3583 memset(prop, 0, sizeof(prop));
3584 property_get("persist.camera.dumpmetadata", prop, "0");
3585 int32_t enabled = atoi(prop);
3586 if (enabled && metadata->is_tuning_params_valid) {
3587 dumpMetadataToFile(metadata->tuning_params,
3588 mMetaFrameCount,
3589 enabled,
3590 "Snapshot",
3591 frame_number);
3592 }
3593 }
3594 }
3595
3596 if (!internalPproc) {
3597 LOGD("couldn't find need_metadata for this metadata");
3598 // Return metadata buffer
3599 if (free_and_bufdone_meta_buf) {
3600 mMetadataChannel->bufDone(metadata_buf);
3601 free(metadata_buf);
3602 }
3603 }
3604 }
3605 if (errorResult) {
3606 notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
3607 }
3608
3609 if (!errorResult && !result.result) {
3610 LOGE("metadata is NULL");
3611 }
3612 result.frame_number = i->frame_number;
3613 result.input_buffer = i->input_buffer;
3614 result.num_output_buffers = 0;
3615 result.output_buffers = NULL;
3616 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3617 j != i->buffers.end(); j++) {
3618 if (j->buffer) {
3619 result.num_output_buffers++;
3620 }
3621 }
3622
3623 updateFpsInPreviewBuffer(metadata, i->frame_number);
3624
3625 if (result.num_output_buffers > 0) {
3626 camera3_stream_buffer_t *result_buffers =
3627 new camera3_stream_buffer_t[result.num_output_buffers];
3628 if (result_buffers != NULL) {
3629 size_t result_buffers_idx = 0;
3630 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3631 j != i->buffers.end(); j++) {
3632 if (j->buffer) {
3633 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3634 m != mPendingFrameDropList.end(); m++) {
3635 QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
3636 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3637 if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
3638 j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3639 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
3640 frame_number, streamID);
3641 m = mPendingFrameDropList.erase(m);
3642 break;
3643 }
3644 }
3645 j->buffer->status |= mPendingBuffersMap.getBufErrStatus(j->buffer->buffer);
3646 mPendingBuffersMap.removeBuf(j->buffer->buffer);
3647 result_buffers[result_buffers_idx++] = *(j->buffer);
3648 free(j->buffer);
3649 j->buffer = NULL;
3650 }
3651 }
3652
3653 result.output_buffers = result_buffers;
3654 mCallbackOps->process_capture_result(mCallbackOps, &result);
3655 LOGD("meta frame_number = %u, capture_time = %lld",
3656 result.frame_number, i->timestamp);
3657 delete[] result_buffers;
3658 }else {
3659 LOGE("Fatal error: out of memory");
3660 }
3661 } else if (!errorResult) {
3662 mCallbackOps->process_capture_result(mCallbackOps, &result);
3663 LOGD("meta frame_number = %u, capture_time = %lld",
3664 result.frame_number, i->timestamp);
3665 }
3666
3667 if (result.result) {
3668 free_camera_metadata((camera_metadata_t *)result.result);
3669 }
3670 i = erasePendingRequest(i);
3671
3672 if (!mPendingReprocessResultList.empty()) {
3673 handlePendingReprocResults(frame_number + 1);
3674 }
3675 }
3676
3677 done_metadata:
3678 for (pendingRequestIterator i = mPendingRequestsList.begin();
3679 i != mPendingRequestsList.end() ;i++) {
3680 i->pipeline_depth++;
3681 }
3682 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3683 unblockRequestIfNecessary();
3684 }
3685
3686 /*===========================================================================
3687 * FUNCTION : hdrPlusPerfLock
3688 *
3689 * DESCRIPTION: perf lock for HDR+ using custom intent
3690 *
3691 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3692 *
3693 * RETURN : None
3694 *
3695 *==========================================================================*/
hdrPlusPerfLock(mm_camera_super_buf_t * metadata_buf)3696 void QCamera3HardwareInterface::hdrPlusPerfLock(
3697 mm_camera_super_buf_t *metadata_buf)
3698 {
3699 if (NULL == metadata_buf) {
3700 LOGE("metadata_buf is NULL");
3701 return;
3702 }
3703 metadata_buffer_t *metadata =
3704 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3705 int32_t *p_frame_number_valid =
3706 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3707 uint32_t *p_frame_number =
3708 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3709
3710 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3711 LOGE("%s: Invalid metadata", __func__);
3712 return;
3713 }
3714
3715 //acquire perf lock for 5 sec after the last HDR frame is captured
3716 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3717 if ((p_frame_number != NULL) &&
3718 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
3719 m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
3720 }
3721 }
3722
3723 //release lock after perf lock timer is expired. If lock is already released,
3724 //isTimerReset returns false
3725 if (m_perfLock.isTimerReset()) {
3726 mLastCustIntentFrmNum = -1;
3727 m_perfLock.lock_rel_timed();
3728 }
3729 }
3730
3731 /*===========================================================================
3732 * FUNCTION : handleInputBufferWithLock
3733 *
3734 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3735 *
3736 * PARAMETERS : @frame_number: frame number of the input buffer
3737 *
3738 * RETURN :
3739 *
3740 *==========================================================================*/
handleInputBufferWithLock(uint32_t frame_number)3741 void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3742 {
3743 ATRACE_CALL();
3744 pendingRequestIterator i = mPendingRequestsList.begin();
3745 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3746 i++;
3747 }
3748 if (i != mPendingRequestsList.end() && i->input_buffer) {
3749 //found the right request
3750 if (!i->shutter_notified) {
3751 CameraMetadata settings;
3752 camera3_notify_msg_t notify_msg;
3753 memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t));
3754 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3755 if(i->settings) {
3756 settings = i->settings;
3757 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3758 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3759 } else {
3760 LOGE("No timestamp in input settings! Using current one.");
3761 }
3762 } else {
3763 LOGE("Input settings missing!");
3764 }
3765
3766 notify_msg.type = CAMERA3_MSG_SHUTTER;
3767 notify_msg.message.shutter.frame_number = frame_number;
3768 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3769 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
3770 i->shutter_notified = true;
3771 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3772 i->frame_number, notify_msg.message.shutter.timestamp);
3773 }
3774
3775 if (i->input_buffer->release_fence != -1) {
3776 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3777 close(i->input_buffer->release_fence);
3778 if (rc != OK) {
3779 LOGE("input buffer sync wait failed %d", rc);
3780 }
3781 }
3782
3783 camera3_capture_result result;
3784 memset(&result, 0, sizeof(camera3_capture_result));
3785 result.frame_number = frame_number;
3786 result.result = i->settings;
3787 result.input_buffer = i->input_buffer;
3788 result.partial_result = PARTIAL_RESULT_COUNT;
3789
3790 mCallbackOps->process_capture_result(mCallbackOps, &result);
3791 LOGD("Input request metadata and input buffer frame_number = %u",
3792 i->frame_number);
3793 i = erasePendingRequest(i);
3794 } else {
3795 LOGE("Could not find input request for frame number %d", frame_number);
3796 }
3797 }
3798
3799 /*===========================================================================
3800 * FUNCTION : handleBufferWithLock
3801 *
3802 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3803 *
3804 * PARAMETERS : @buffer: image buffer for the callback
3805 * @frame_number: frame number of the image buffer
3806 *
3807 * RETURN :
3808 *
3809 *==========================================================================*/
handleBufferWithLock(camera3_stream_buffer_t * buffer,uint32_t frame_number)3810 void QCamera3HardwareInterface::handleBufferWithLock(
3811 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3812 {
3813 ATRACE_CALL();
3814 /* Nothing to be done during error state */
3815 if ((ERROR == mState) || (DEINIT == mState)) {
3816 return;
3817 }
3818 if (mFlushPerf) {
3819 handleBuffersDuringFlushLock(buffer);
3820 return;
3821 }
3822 //not in flush
3823 // If the frame number doesn't exist in the pending request list,
3824 // directly send the buffer to the frameworks, and update pending buffers map
3825 // Otherwise, book-keep the buffer.
3826 pendingRequestIterator i = mPendingRequestsList.begin();
3827 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3828 i++;
3829 }
3830 if (i == mPendingRequestsList.end()) {
3831 // Verify all pending requests frame_numbers are greater
3832 for (pendingRequestIterator j = mPendingRequestsList.begin();
3833 j != mPendingRequestsList.end(); j++) {
3834 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3835 LOGW("Error: pending live frame number %d is smaller than %d",
3836 j->frame_number, frame_number);
3837 }
3838 }
3839 camera3_capture_result_t result;
3840 memset(&result, 0, sizeof(camera3_capture_result_t));
3841 result.result = NULL;
3842 result.frame_number = frame_number;
3843 result.num_output_buffers = 1;
3844 result.partial_result = 0;
3845 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3846 m != mPendingFrameDropList.end(); m++) {
3847 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3848 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3849 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3850 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3851 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3852 frame_number, streamID);
3853 m = mPendingFrameDropList.erase(m);
3854 break;
3855 }
3856 }
3857 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
3858 result.output_buffers = buffer;
3859 LOGH("result frame_number = %d, buffer = %p",
3860 frame_number, buffer->buffer);
3861
3862 mPendingBuffersMap.removeBuf(buffer->buffer);
3863
3864 mCallbackOps->process_capture_result(mCallbackOps, &result);
3865 } else {
3866 if (i->input_buffer) {
3867 CameraMetadata settings;
3868 camera3_notify_msg_t notify_msg;
3869 memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t));
3870 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3871 if(i->settings) {
3872 settings = i->settings;
3873 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3874 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3875 } else {
3876 LOGW("No timestamp in input settings! Using current one.");
3877 }
3878 } else {
3879 LOGE("Input settings missing!");
3880 }
3881
3882 notify_msg.type = CAMERA3_MSG_SHUTTER;
3883 notify_msg.message.shutter.frame_number = frame_number;
3884 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3885
3886 if (i->input_buffer->release_fence != -1) {
3887 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3888 close(i->input_buffer->release_fence);
3889 if (rc != OK) {
3890 LOGE("input buffer sync wait failed %d", rc);
3891 }
3892 }
3893 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
3894 mPendingBuffersMap.removeBuf(buffer->buffer);
3895
3896 bool notifyNow = true;
3897 for (pendingRequestIterator j = mPendingRequestsList.begin();
3898 j != mPendingRequestsList.end(); j++) {
3899 if (j->frame_number < frame_number) {
3900 notifyNow = false;
3901 break;
3902 }
3903 }
3904
3905 if (notifyNow) {
3906 camera3_capture_result result;
3907 memset(&result, 0, sizeof(camera3_capture_result));
3908 result.frame_number = frame_number;
3909 result.result = i->settings;
3910 result.input_buffer = i->input_buffer;
3911 result.num_output_buffers = 1;
3912 result.output_buffers = buffer;
3913 result.partial_result = PARTIAL_RESULT_COUNT;
3914
3915 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
3916 mCallbackOps->process_capture_result(mCallbackOps, &result);
3917 LOGD("Notify reprocess now %d!", frame_number);
3918 i = erasePendingRequest(i);
3919 } else {
3920 // Cache reprocess result for later
3921 PendingReprocessResult pendingResult;
3922 memset(&pendingResult, 0, sizeof(PendingReprocessResult));
3923 pendingResult.notify_msg = notify_msg;
3924 pendingResult.buffer = *buffer;
3925 pendingResult.frame_number = frame_number;
3926 mPendingReprocessResultList.push_back(pendingResult);
3927 LOGD("Cache reprocess result %d!", frame_number);
3928 }
3929 } else {
3930 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3931 j != i->buffers.end(); j++) {
3932 if (j->stream == buffer->stream) {
3933 if (j->buffer != NULL) {
3934 LOGE("Error: buffer is already set");
3935 } else {
3936 j->buffer = (camera3_stream_buffer_t *)malloc(
3937 sizeof(camera3_stream_buffer_t));
3938 *(j->buffer) = *buffer;
3939 LOGH("cache buffer %p at result frame_number %u",
3940 buffer->buffer, frame_number);
3941 }
3942 }
3943 }
3944 }
3945 }
3946 }
3947
3948 /*===========================================================================
3949 * FUNCTION : unblockRequestIfNecessary
3950 *
3951 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3952 * that mMutex is held when this function is called.
3953 *
3954 * PARAMETERS :
3955 *
3956 * RETURN :
3957 *
3958 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
    // Unblock process_capture_request
    // Wakes one thread waiting on mRequestCond. Per the function header,
    // mMutex is held by the caller, so the signal cannot race with the wait.
    // Signaling with no waiter present is a harmless no-op.
    pthread_cond_signal(&mRequestCond);
}
3964
3965
3966 /*===========================================================================
3967 * FUNCTION : processCaptureRequest
3968 *
3969 * DESCRIPTION: process a capture request from camera service
3970 *
3971 * PARAMETERS :
3972 * @request : request from framework to process
3973 *
3974 * RETURN :
3975 *
3976 *==========================================================================*/
processCaptureRequest(camera3_capture_request_t * request)3977 int QCamera3HardwareInterface::processCaptureRequest(
3978 camera3_capture_request_t *request)
3979 {
3980 ATRACE_CALL();
3981 int rc = NO_ERROR;
3982 int32_t request_id;
3983 CameraMetadata meta;
3984 bool isVidBufRequested = false;
3985 camera3_stream_buffer_t *pInputBuffer = NULL;
3986
3987 pthread_mutex_lock(&mMutex);
3988
3989 // Validate current state
3990 switch (mState) {
3991 case CONFIGURED:
3992 case STARTED:
3993 /* valid state */
3994 break;
3995
3996 case ERROR:
3997 pthread_mutex_unlock(&mMutex);
3998 handleCameraDeviceError();
3999 return -ENODEV;
4000
4001 default:
4002 LOGE("Invalid state %d", mState);
4003 pthread_mutex_unlock(&mMutex);
4004 return -ENODEV;
4005 }
4006
4007 rc = validateCaptureRequest(request);
4008 if (rc != NO_ERROR) {
4009 LOGE("incoming request is not valid");
4010 pthread_mutex_unlock(&mMutex);
4011 return rc;
4012 }
4013
4014 meta = request->settings;
4015
4016 // For first capture request, send capture intent, and
4017 // stream on all streams
4018 if (mState == CONFIGURED) {
4019 m_perfLock.lock_acq();
4020 //update settings from app here
4021 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4022 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4023 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4024 }
4025 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4026 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4027 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4028 }
4029 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4030 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4031 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4032
4033 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4034 (mLinkedCameraId != mCameraId) ) {
4035 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4036 mLinkedCameraId, mCameraId);
4037 pthread_mutex_unlock(&mMutex);
4038 goto error_exit;
4039 }
4040 }
4041
4042 // add bundle related cameras
4043 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4044 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4045 if (mIsDeviceLinked)
4046 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4047 else
4048 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4049
4050 pthread_mutex_lock(&gCamLock);
4051
4052 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4053 LOGE("Dualcam: Invalid Session Id ");
4054 pthread_mutex_unlock(&gCamLock);
4055 pthread_mutex_unlock(&mMutex);
4056 goto error_exit;
4057 }
4058
4059 if (mIsMainCamera == 1) {
4060 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4061 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
4062 // related session id should be session id of linked session
4063 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4064 } else {
4065 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4066 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
4067 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4068 }
4069 pthread_mutex_unlock(&gCamLock);
4070
4071 rc = mCameraHandle->ops->sync_related_sensors(
4072 mCameraHandle->camera_handle, m_pRelCamSyncBuf);
4073 if (rc < 0) {
4074 LOGE("Dualcam: link failed");
4075 pthread_mutex_unlock(&mMutex);
4076 goto error_exit;
4077 }
4078 }
4079
4080 //Then start them.
4081 LOGH("Start META Channel");
4082 rc = mMetadataChannel->start();
4083 if (rc < 0) {
4084 LOGE("META channel start failed");
4085 pthread_mutex_unlock(&mMutex);
4086 goto error_exit;
4087 }
4088
4089 if (mAnalysisChannel) {
4090 rc = mAnalysisChannel->start();
4091 if (rc < 0) {
4092 LOGE("Analysis channel start failed");
4093 mMetadataChannel->stop();
4094 pthread_mutex_unlock(&mMutex);
4095 goto error_exit;
4096 }
4097 }
4098
4099 if (mSupportChannel) {
4100 rc = mSupportChannel->start();
4101 if (rc < 0) {
4102 LOGE("Support channel start failed");
4103 mMetadataChannel->stop();
4104 /* Although support and analysis are mutually exclusive today
4105 adding it in anycase for future proofing */
4106 if (mAnalysisChannel) {
4107 mAnalysisChannel->stop();
4108 }
4109 pthread_mutex_unlock(&mMutex);
4110 goto error_exit;
4111 }
4112 }
4113 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4114 it != mStreamInfo.end(); it++) {
4115 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4116 LOGH("Start Processing Channel mask=%d",
4117 channel->getStreamTypeMask());
4118 rc = channel->start();
4119 if (rc < 0) {
4120 LOGE("channel start failed");
4121 pthread_mutex_unlock(&mMutex);
4122 goto error_exit;
4123 }
4124 }
4125
4126 if (mRawDumpChannel) {
4127 LOGD("Starting raw dump stream");
4128 rc = mRawDumpChannel->start();
4129 if (rc != NO_ERROR) {
4130 LOGE("Error Starting Raw Dump Channel");
4131 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4132 it != mStreamInfo.end(); it++) {
4133 QCamera3Channel *channel =
4134 (QCamera3Channel *)(*it)->stream->priv;
4135 LOGH("Stopping Processing Channel mask=%d",
4136 channel->getStreamTypeMask());
4137 channel->stop();
4138 }
4139 if (mSupportChannel)
4140 mSupportChannel->stop();
4141 if (mAnalysisChannel) {
4142 mAnalysisChannel->stop();
4143 }
4144 mMetadataChannel->stop();
4145 pthread_mutex_unlock(&mMutex);
4146 goto error_exit;
4147 }
4148 }
4149
4150 if (mChannelHandle) {
4151
4152 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4153 mChannelHandle);
4154 if (rc != NO_ERROR) {
4155 LOGE("start_channel failed %d", rc);
4156 pthread_mutex_unlock(&mMutex);
4157 goto error_exit;
4158 }
4159 }
4160
4161 goto no_error;
4162 error_exit:
4163 m_perfLock.lock_rel();
4164 return rc;
4165 no_error:
4166 m_perfLock.lock_rel();
4167
4168 mWokenUpByDaemon = false;
4169 mPendingLiveRequest = 0;
4170 mFirstConfiguration = false;
4171 enablePowerHint();
4172 }
4173
4174 uint32_t frameNumber = request->frame_number;
4175 cam_stream_ID_t streamsArray;
4176
4177 if (mFlushPerf) {
4178 //we cannot accept any requests during flush
4179 LOGE("process_capture_request cannot proceed during flush");
4180 pthread_mutex_unlock(&mMutex);
4181 return NO_ERROR; //should return an error
4182 }
4183
4184 if (meta.exists(ANDROID_REQUEST_ID)) {
4185 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4186 mCurrentRequestId = request_id;
4187 LOGD("Received request with id: %d", request_id);
4188 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4189 LOGE("Unable to find request id field, \
4190 & no previous id available");
4191 pthread_mutex_unlock(&mMutex);
4192 return NAME_NOT_FOUND;
4193 } else {
4194 LOGD("Re-using old request id");
4195 request_id = mCurrentRequestId;
4196 }
4197
4198 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4199 request->num_output_buffers,
4200 request->input_buffer,
4201 frameNumber);
4202 // Acquire all request buffers first
4203 streamsArray.num_streams = 0;
4204 int blob_request = 0;
4205 uint32_t snapshotStreamId = 0;
4206 for (size_t i = 0; i < request->num_output_buffers; i++) {
4207 const camera3_stream_buffer_t& output = request->output_buffers[i];
4208 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4209
4210 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4211 //Call function to store local copy of jpeg data for encode params.
4212 blob_request = 1;
4213 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4214 }
4215
4216 if (output.acquire_fence != -1) {
4217 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4218 close(output.acquire_fence);
4219 if (rc != OK) {
4220 LOGE("sync wait failed %d", rc);
4221 pthread_mutex_unlock(&mMutex);
4222 return rc;
4223 }
4224 }
4225
4226 streamsArray.stream_request[streamsArray.num_streams++].streamID =
4227 channel->getStreamID(channel->getStreamTypeMask());
4228
4229 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4230 isVidBufRequested = true;
4231 }
4232 }
4233
4234 if (blob_request) {
4235 KPI_ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
4236 }
4237 if (blob_request && mRawDumpChannel) {
4238 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
4239 streamsArray.stream_request[streamsArray.num_streams].streamID =
4240 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
4241 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
4242 }
4243
4244 if(request->input_buffer == NULL) {
4245 /* Parse the settings:
4246 * - For every request in NORMAL MODE
4247 * - For every request in HFR mode during preview only case
4248 * - For first request of every batch in HFR mode during video
4249 * recording. In batchmode the same settings except frame number is
4250 * repeated in each request of the batch.
4251 */
4252 if (!mBatchSize ||
4253 (mBatchSize && !isVidBufRequested) ||
4254 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
4255 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
4256 if (rc < 0) {
4257 LOGE("fail to set frame parameters");
4258 pthread_mutex_unlock(&mMutex);
4259 return rc;
4260 }
4261 }
4262 /* For batchMode HFR, setFrameParameters is not called for every
4263 * request. But only frame number of the latest request is parsed.
4264 * Keep track of first and last frame numbers in a batch so that
4265 * metadata for the frame numbers of batch can be duplicated in
4266 * handleBatchMetadta */
4267 if (mBatchSize) {
4268 if (!mToBeQueuedVidBufs) {
4269 //start of the batch
4270 mFirstFrameNumberInBatch = request->frame_number;
4271 }
4272 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4273 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4274 LOGE("Failed to set the frame number in the parameters");
4275 pthread_mutex_unlock(&mMutex);
4276 return BAD_VALUE;
4277 }
4278 }
4279 if (mNeedSensorRestart) {
4280 /* Unlock the mutex as restartSensor waits on the channels to be
4281 * stopped, which in turn calls stream callback functions -
4282 * handleBufferWithLock and handleMetadataWithLock */
4283 pthread_mutex_unlock(&mMutex);
4284 rc = dynamicUpdateMetaStreamInfo();
4285 if (rc != NO_ERROR) {
4286 LOGE("Restarting the sensor failed");
4287 return BAD_VALUE;
4288 }
4289 mNeedSensorRestart = false;
4290 pthread_mutex_lock(&mMutex);
4291 }
4292 } else {
4293
4294 if (request->input_buffer->acquire_fence != -1) {
4295 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
4296 close(request->input_buffer->acquire_fence);
4297 if (rc != OK) {
4298 LOGE("input buffer sync wait failed %d", rc);
4299 pthread_mutex_unlock(&mMutex);
4300 return rc;
4301 }
4302 }
4303 }
4304
4305 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
4306 mLastCustIntentFrmNum = frameNumber;
4307 }
4308 /* Update pending request list and pending buffers map */
4309 PendingRequestInfo pendingRequest = {};
4310 pendingRequestIterator latestRequest;
4311 pendingRequest.frame_number = frameNumber;
4312 pendingRequest.num_buffers = request->num_output_buffers;
4313 pendingRequest.request_id = request_id;
4314 pendingRequest.blob_request = blob_request;
4315 pendingRequest.timestamp = 0;
4316 pendingRequest.bUrgentReceived = 0;
4317 if (request->input_buffer) {
4318 pendingRequest.input_buffer =
4319 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4320 *(pendingRequest.input_buffer) = *(request->input_buffer);
4321 pInputBuffer = pendingRequest.input_buffer;
4322 } else {
4323 pendingRequest.input_buffer = NULL;
4324 pInputBuffer = NULL;
4325 }
4326
4327 pendingRequest.pipeline_depth = 0;
4328 pendingRequest.partial_result_cnt = 0;
4329 extractJpegMetadata(mCurJpegMeta, request);
4330 pendingRequest.jpegMetadata = mCurJpegMeta;
4331 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
4332 pendingRequest.shutter_notified = false;
4333
4334 //extract capture intent
4335 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4336 mCaptureIntent =
4337 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4338 }
4339 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
4340 mHybridAeEnable =
4341 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
4342 }
4343 pendingRequest.capture_intent = mCaptureIntent;
4344 pendingRequest.hybrid_ae_enable = mHybridAeEnable;
4345 /* DevCamDebug metadata processCaptureRequest */
4346 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
4347 mDevCamDebugMetaEnable =
4348 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
4349 }
4350 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
4351 /* DevCamDebug metadata end */
4352
4353 //extract CAC info
4354 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4355 mCacMode =
4356 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4357 }
4358 pendingRequest.fwkCacMode = mCacMode;
4359
4360 PendingBuffersInRequest bufsForCurRequest;
4361 bufsForCurRequest.frame_number = frameNumber;
4362 // Mark current timestamp for the new request
4363 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4364
4365 for (size_t i = 0; i < request->num_output_buffers; i++) {
4366 RequestedBufferInfo requestedBuf;
4367 memset(&requestedBuf, 0, sizeof(requestedBuf));
4368 requestedBuf.stream = request->output_buffers[i].stream;
4369 requestedBuf.buffer = NULL;
4370 pendingRequest.buffers.push_back(requestedBuf);
4371
4372 // Add to buffer handle the pending buffers list
4373 PendingBufferInfo bufferInfo;
4374 bufferInfo.buffer = request->output_buffers[i].buffer;
4375 bufferInfo.stream = request->output_buffers[i].stream;
4376 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4377 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4378 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4379 frameNumber, bufferInfo.buffer,
4380 channel->getStreamTypeMask(), bufferInfo.stream->format);
4381 }
4382 // Add this request packet into mPendingBuffersMap
4383 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4384 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4385 mPendingBuffersMap.get_num_overall_buffers());
4386
4387 latestRequest = mPendingRequestsList.insert(
4388 mPendingRequestsList.end(), pendingRequest);
4389 if(mFlush) {
4390 LOGI("mFlush is true");
4391 pthread_mutex_unlock(&mMutex);
4392 return NO_ERROR;
4393 }
4394
4395 int indexUsed;
4396 // Notify metadata channel we receive a request
4397 mMetadataChannel->request(NULL, frameNumber, indexUsed);
4398
4399 if(request->input_buffer != NULL){
4400 LOGD("Input request, frame_number %d", frameNumber);
4401 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4402 if (NO_ERROR != rc) {
4403 LOGE("fail to set reproc parameters");
4404 pthread_mutex_unlock(&mMutex);
4405 return rc;
4406 }
4407 }
4408
4409 // Call request on other streams
4410 uint32_t streams_need_metadata = 0;
4411 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4412 for (size_t i = 0; i < request->num_output_buffers; i++) {
4413 const camera3_stream_buffer_t& output = request->output_buffers[i];
4414 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4415
4416 if (channel == NULL) {
4417 LOGW("invalid channel pointer for stream");
4418 continue;
4419 }
4420
4421 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4422 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4423 output.buffer, request->input_buffer, frameNumber);
4424 if(request->input_buffer != NULL){
4425 rc = channel->request(output.buffer, frameNumber,
4426 pInputBuffer, &mReprocMeta, indexUsed);
4427 if (rc < 0) {
4428 LOGE("Fail to request on picture channel");
4429 pthread_mutex_unlock(&mMutex);
4430 return rc;
4431 }
4432 } else {
4433 LOGD("snapshot request with buffer %p, frame_number %d",
4434 output.buffer, frameNumber);
4435 if (!request->settings) {
4436 rc = channel->request(output.buffer, frameNumber,
4437 NULL, mPrevParameters, indexUsed);
4438 } else {
4439 rc = channel->request(output.buffer, frameNumber,
4440 NULL, mParameters, indexUsed);
4441 }
4442 if (rc < 0) {
4443 LOGE("Fail to request on picture channel");
4444 pthread_mutex_unlock(&mMutex);
4445 return rc;
4446 }
4447
4448 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4449 uint32_t j = 0;
4450 for (j = 0; j < streamsArray.num_streams; j++) {
4451 if (streamsArray.stream_request[j].streamID == streamId) {
4452 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4453 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4454 else
4455 streamsArray.stream_request[j].buf_index = indexUsed;
4456 break;
4457 }
4458 }
4459 if (j == streamsArray.num_streams) {
4460 LOGE("Did not find matching stream to update index");
4461 assert(0);
4462 }
4463
4464 pendingBufferIter->need_metadata = true;
4465
4466 if (isEISCropInSnapshotNeeded(meta)) {
4467 pendingBufferIter->need_crop = true;
4468 pendingBufferIter->crop_info = mLastEISCropInfo;
4469 }
4470
4471 streams_need_metadata++;
4472 }
4473 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4474 bool needMetadata = false;
4475 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4476 rc = yuvChannel->request(output.buffer, frameNumber,
4477 pInputBuffer,
4478 (pInputBuffer ? &mReprocMeta : mParameters), needMetadata, indexUsed);
4479 if (rc < 0) {
4480 LOGE("Fail to request on YUV channel");
4481 pthread_mutex_unlock(&mMutex);
4482 return rc;
4483 }
4484
4485 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4486 uint32_t j = 0;
4487 for (j = 0; j < streamsArray.num_streams; j++) {
4488 if (streamsArray.stream_request[j].streamID == streamId) {
4489 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4490 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4491 else
4492 streamsArray.stream_request[j].buf_index = indexUsed;
4493 break;
4494 }
4495 }
4496 if (j == streamsArray.num_streams) {
4497 LOGE("Did not find matching stream to update index");
4498 assert(0);
4499 }
4500
4501 pendingBufferIter->need_metadata = needMetadata;
4502 if (needMetadata)
4503 streams_need_metadata += 1;
4504 LOGD("calling YUV channel request, need_metadata is %d",
4505 needMetadata);
4506 } else {
4507 LOGD("request with buffer %p, frame_number %d",
4508 output.buffer, frameNumber);
4509
4510 rc = channel->request(output.buffer, frameNumber, indexUsed);
4511
4512 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4513 uint32_t j = 0;
4514 for (j = 0; j < streamsArray.num_streams; j++) {
4515 if (streamsArray.stream_request[j].streamID == streamId) {
4516 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4517 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4518 else
4519 streamsArray.stream_request[j].buf_index = indexUsed;
4520 break;
4521 }
4522 }
4523 if (j == streamsArray.num_streams) {
4524 LOGE("Did not find matching stream to update index");
4525 assert(0);
4526 }
4527
4528 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4529 && mBatchSize) {
4530 mToBeQueuedVidBufs++;
4531 if (mToBeQueuedVidBufs == mBatchSize) {
4532 channel->queueBatchBuf();
4533 }
4534 }
4535 if (rc < 0) {
4536 LOGE("request failed");
4537 pthread_mutex_unlock(&mMutex);
4538 return rc;
4539 }
4540 }
4541 pendingBufferIter++;
4542 }
4543
4544 //If 2 streams have need_metadata set to true, fail the request, unless
4545 //we copy/reference count the metadata buffer
4546 if (streams_need_metadata > 1) {
4547 LOGE("not supporting request in which two streams requires"
4548 " 2 HAL metadata for reprocessing");
4549 pthread_mutex_unlock(&mMutex);
4550 return -EINVAL;
4551 }
4552
4553 if (request->input_buffer == NULL) {
4554 /* Set the parameters to backend:
4555 * - For every request in NORMAL MODE
4556 * - For every request in HFR mode during preview only case
4557 * - Once every batch in HFR mode during video recording
4558 */
4559 if (!mBatchSize ||
4560 (mBatchSize && !isVidBufRequested) ||
4561 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4562 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4563 mBatchSize, isVidBufRequested,
4564 mToBeQueuedVidBufs);
4565
4566 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
4567 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4568 uint32_t m = 0;
4569 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4570 if (streamsArray.stream_request[k].streamID ==
4571 mBatchedStreamsArray.stream_request[m].streamID)
4572 break;
4573 }
4574 if (m == mBatchedStreamsArray.num_streams) {
4575 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
4576 streamsArray.stream_request[k].streamID;
4577 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
4578 streamsArray.stream_request[k].buf_index;
4579 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4580 }
4581 }
4582 streamsArray = mBatchedStreamsArray;
4583 }
4584 /* Update stream id of all the requested buffers */
4585 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
4586 LOGE("Failed to set stream type mask in the parameters");
4587 pthread_mutex_unlock(&mMutex);
4588 return BAD_VALUE;
4589 }
4590
4591 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4592 mParameters);
4593 if (rc < 0) {
4594 LOGE("set_parms failed");
4595 }
4596 /* reset to zero coz, the batch is queued */
4597 mToBeQueuedVidBufs = 0;
4598 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
4599 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
4600 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
4601 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4602 uint32_t m = 0;
4603 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4604 if (streamsArray.stream_request[k].streamID ==
4605 mBatchedStreamsArray.stream_request[m].streamID)
4606 break;
4607 }
4608 if (m == mBatchedStreamsArray.num_streams) {
4609 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
4610 streamsArray.stream_request[k].streamID;
4611 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
4612 streamsArray.stream_request[k].buf_index;
4613 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4614 }
4615 }
4616 }
4617 mPendingLiveRequest++;
4618 }
4619
4620 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
4621
4622 mState = STARTED;
4623 // Added a timed condition wait
4624 struct timespec ts;
4625 uint8_t isValidTimeout = 1;
4626 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
4627 if (rc < 0) {
4628 isValidTimeout = 0;
4629 LOGE("Error reading the real time clock!!");
4630 }
4631 else {
4632 // Make timeout as 5 sec for request to be honored
4633 ts.tv_sec += 5;
4634 }
4635 //Block on conditional variable
4636 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
4637 (mState != ERROR) && (mState != DEINIT)) {
4638 if (!isValidTimeout) {
4639 LOGD("Blocking on conditional wait");
4640 pthread_cond_wait(&mRequestCond, &mMutex);
4641 }
4642 else {
4643 LOGD("Blocking on timed conditional wait");
4644 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
4645 if (rc == ETIMEDOUT) {
4646 rc = -ENODEV;
4647 LOGE("Unblocked on timeout!!!!");
4648 break;
4649 }
4650 }
4651 LOGD("Unblocked");
4652 if (mWokenUpByDaemon) {
4653 mWokenUpByDaemon = false;
4654 if (mPendingLiveRequest < mMaxInFlightRequests)
4655 break;
4656 }
4657 }
4658 pthread_mutex_unlock(&mMutex);
4659
4660 return rc;
4661 }
4662
4663 /*===========================================================================
4664 * FUNCTION : dump
4665 *
 * DESCRIPTION: Dumps the pending-request list, pending buffer map and
 *              pending frame-drop list to the given file descriptor
 *
 * PARAMETERS :
 *   @fd : file descriptor to write the debug dump to
4671 * RETURN :
4672 *==========================================================================*/
dump(int fd)4673 void QCamera3HardwareInterface::dump(int fd)
4674 {
4675 pthread_mutex_lock(&mMutex);
4676 dprintf(fd, "\n Camera HAL3 information Begin \n");
4677
4678 dprintf(fd, "\nNumber of pending requests: %zu \n",
4679 mPendingRequestsList.size());
4680 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4681 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
4682 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4683 for(pendingRequestIterator i = mPendingRequestsList.begin();
4684 i != mPendingRequestsList.end(); i++) {
4685 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
4686 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
4687 i->input_buffer);
4688 }
4689 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
4690 mPendingBuffersMap.get_num_overall_buffers());
4691 dprintf(fd, "-------+------------------\n");
4692 dprintf(fd, " Frame | Stream type mask \n");
4693 dprintf(fd, "-------+------------------\n");
4694 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4695 for(auto &j : req.mPendingBufferList) {
4696 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
4697 dprintf(fd, " %5d | %11d \n",
4698 req.frame_number, channel->getStreamTypeMask());
4699 }
4700 }
4701 dprintf(fd, "-------+------------------\n");
4702
4703 dprintf(fd, "\nPending frame drop list: %zu\n",
4704 mPendingFrameDropList.size());
4705 dprintf(fd, "-------+-----------\n");
4706 dprintf(fd, " Frame | Stream ID \n");
4707 dprintf(fd, "-------+-----------\n");
4708 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
4709 i != mPendingFrameDropList.end(); i++) {
4710 dprintf(fd, " %5d | %9d \n",
4711 i->frame_number, i->stream_ID);
4712 }
4713 dprintf(fd, "-------+-----------\n");
4714
4715 dprintf(fd, "\n Camera HAL3 information End \n");
4716
4717 /* use dumpsys media.camera as trigger to send update debug level event */
4718 mUpdateDebugLevel = true;
4719 pthread_mutex_unlock(&mMutex);
4720 return;
4721 }
4722
4723 /*===========================================================================
4724 * FUNCTION : flush
4725 *
4726 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
4727 * conditionally restarts channels
4728 *
4729 * PARAMETERS :
4730 * @ restartChannels: re-start all channels
4731 *
4732 *
4733 * RETURN :
4734 * 0 on success
4735 * Error code on failure
4736 *==========================================================================*/
flush(bool restartChannels)4737 int QCamera3HardwareInterface::flush(bool restartChannels)
4738 {
4739 KPI_ATRACE_CALL();
4740 int32_t rc = NO_ERROR;
4741
4742 LOGD("Unblocking Process Capture Request");
4743 pthread_mutex_lock(&mMutex);
4744 mFlush = true;
4745 pthread_mutex_unlock(&mMutex);
4746
4747 rc = stopAllChannels();
4748 // unlink of dualcam
4749 if (mIsDeviceLinked) {
4750 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4751 pthread_mutex_lock(&gCamLock);
4752
4753 if (mIsMainCamera == 1) {
4754 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4755 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
4756 // related session id should be session id of linked session
4757 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4758 } else {
4759 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4760 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
4761 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4762 }
4763 pthread_mutex_unlock(&gCamLock);
4764
4765 rc = mCameraHandle->ops->sync_related_sensors(
4766 mCameraHandle->camera_handle, m_pRelCamSyncBuf);
4767 if (rc < 0) {
4768 LOGE("Dualcam: Unlink failed, but still proceed to close");
4769 }
4770 }
4771
4772 if (rc < 0) {
4773 LOGE("stopAllChannels failed");
4774 return rc;
4775 }
4776 if (mChannelHandle) {
4777 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
4778 mChannelHandle);
4779 }
4780
4781 // Reset bundle info
4782 rc = setBundleInfo();
4783 if (rc < 0) {
4784 LOGE("setBundleInfo failed %d", rc);
4785 return rc;
4786 }
4787
4788 // Mutex Lock
4789 pthread_mutex_lock(&mMutex);
4790
4791 // Unblock process_capture_request
4792 mPendingLiveRequest = 0;
4793 pthread_cond_signal(&mRequestCond);
4794
4795 rc = notifyErrorForPendingRequests();
4796 if (rc < 0) {
4797 LOGE("notifyErrorForPendingRequests failed");
4798 pthread_mutex_unlock(&mMutex);
4799 return rc;
4800 }
4801
4802 mFlush = false;
4803
4804 // Start the Streams/Channels
4805 if (restartChannels) {
4806 rc = startAllChannels();
4807 if (rc < 0) {
4808 LOGE("startAllChannels failed");
4809 pthread_mutex_unlock(&mMutex);
4810 return rc;
4811 }
4812 }
4813
4814 if (mChannelHandle) {
4815 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4816 mChannelHandle);
4817 if (rc < 0) {
4818 LOGE("start_channel failed");
4819 pthread_mutex_unlock(&mMutex);
4820 return rc;
4821 }
4822 }
4823
4824 pthread_mutex_unlock(&mMutex);
4825
4826 return 0;
4827 }
4828
4829 /*===========================================================================
4830 * FUNCTION : flushPerf
4831 *
4832 * DESCRIPTION: This is the performance optimization version of flush that does
4833 * not use stream off, rather flushes the system
4834 *
4835 * PARAMETERS :
4836 *
4837 *
4838 * RETURN : 0 : success
4839 * -EINVAL: input is malformed (device is not valid)
4840 * -ENODEV: if the device has encountered a serious error
4841 *==========================================================================*/
/**
 * Fast-path flush: instead of stopping streams, ask the backend to flush and
 * then wait (bounded by FLUSH_TIMEOUT when a timed wait is possible) until
 * every buffer held by the HAL has been returned, before erroring out the
 * pending requests to the framework.
 *
 * Returns 0 on success, -ENODEV on IOCTL/wait/channel-flush failure.
 */
int QCamera3HardwareInterface::flushPerf()
{
    ATRACE_CALL();
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    // Snapshot the number of buffers still owned by the HAL; the flush is
    // complete once all of them have come back from the backend.
    mPendingBuffersMap.numPendingBufsAtFlush =
        mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
        mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        // Nothing outstanding — the flush is trivially complete.
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // NOTE(review): the clock read here is CLOCK_MONOTONIC even though the
    // log text below says "real time clock" — presumably mBuffersCond is
    // configured with a monotonic clock attribute; confirm at the condvar's
    // init site.
    rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
    if (rc < 0) {
        // Fall back to an unbounded wait if the clock cannot be read.
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // The buffer-return path is expected to decrement numPendingBufsAtFlush
    // and signal mBuffersCond; any wait error (including ETIMEDOUT) breaks
    // out and is reported as -ENODEV below.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                LOGE("pthread_cond_wait failed due to rc = %s",
                    strerror(rc));
                break;
            }
        } else {
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                    strerror(rc));
                break;
            }
        }
    }
    if (rc != 0) {
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
                LOGE("Flushing the channels failed with error %d", rc);
                // even though the channel flush failed we need to continue and
                // return the buffers we have to the framework, however the return
                // value will be an error
                rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
4942
4943 /*===========================================================================
4944 * FUNCTION : handleCameraDeviceError
4945 *
4946 * DESCRIPTION: This function calls internal flush and notifies the error to
4947 * framework and updates the state variable.
4948 *
4949 * PARAMETERS : None
4950 *
4951 * RETURN : NO_ERROR on Success
4952 * Error code on failure
4953 *==========================================================================*/
handleCameraDeviceError()4954 int32_t QCamera3HardwareInterface::handleCameraDeviceError()
4955 {
4956 int32_t rc = NO_ERROR;
4957
4958 pthread_mutex_lock(&mMutex);
4959 if (mState != ERROR) {
4960 //if mState != ERROR, nothing to be done
4961 pthread_mutex_unlock(&mMutex);
4962 return NO_ERROR;
4963 }
4964 pthread_mutex_unlock(&mMutex);
4965
4966 rc = flush(false /* restart channels */);
4967 if (NO_ERROR != rc) {
4968 LOGE("internal flush to handle mState = ERROR failed");
4969 }
4970
4971 pthread_mutex_lock(&mMutex);
4972 mState = DEINIT;
4973 pthread_mutex_unlock(&mMutex);
4974
4975 camera3_notify_msg_t notify_msg;
4976 memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t));
4977 notify_msg.type = CAMERA3_MSG_ERROR;
4978 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
4979 notify_msg.message.error.error_stream = NULL;
4980 notify_msg.message.error.frame_number = 0;
4981 mCallbackOps->notify(mCallbackOps, ¬ify_msg);
4982
4983 return rc;
4984 }
4985
4986 /*===========================================================================
4987 * FUNCTION : captureResultCb
4988 *
4989 * DESCRIPTION: Callback handler for all capture result
4990 * (streams, as well as metadata)
4991 *
4992 * PARAMETERS :
4993 * @metadata : metadata information
4994 * @buffer : actual gralloc buffer to be returned to frameworks.
4995 * NULL if metadata.
4996 *
4997 * RETURN : NONE
4998 *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata_buf,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer)4999 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
5000 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
5001 {
5002 if (metadata_buf) {
5003 pthread_mutex_lock(&mMutex);
5004 uint8_t batchSize = mBatchSize;
5005 pthread_mutex_unlock(&mMutex);
5006 if (batchSize) {
5007 handleBatchMetadata(metadata_buf,
5008 true /* free_and_bufdone_meta_buf */);
5009 } else { /* mBatchSize = 0 */
5010 hdrPlusPerfLock(metadata_buf);
5011 pthread_mutex_lock(&mMutex);
5012 handleMetadataWithLock(metadata_buf,
5013 true /* free_and_bufdone_meta_buf */,
5014 true /* last urgent frame of batch metadata */,
5015 true /* last frame of batch metadata */ );
5016 pthread_mutex_unlock(&mMutex);
5017 }
5018 } else if (isInputBuffer) {
5019 pthread_mutex_lock(&mMutex);
5020 handleInputBufferWithLock(frame_number);
5021 pthread_mutex_unlock(&mMutex);
5022 } else {
5023 pthread_mutex_lock(&mMutex);
5024 handleBufferWithLock(buffer, frame_number);
5025 pthread_mutex_unlock(&mMutex);
5026 }
5027 return;
5028 }
5029
5030 /*===========================================================================
5031 * FUNCTION : getReprocessibleOutputStreamId
5032 *
5033 * DESCRIPTION: Get source output stream id for the input reprocess stream
5034 * based on size and format, which would be the largest
5035 * output stream if an input stream exists.
5036 *
5037 * PARAMETERS :
5038 * @id : return the stream id if found
5039 *
5040 * RETURN : int32_t type of status
5041 * NO_ERROR -- success
 *              non-zero failure code
5043 *==========================================================================*/
getReprocessibleOutputStreamId(uint32_t & id)5044 int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
5045 {
5046 /* check if any output or bidirectional stream with the same size and format
5047 and return that stream */
5048 if ((mInputStreamInfo.dim.width > 0) &&
5049 (mInputStreamInfo.dim.height > 0)) {
5050 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5051 it != mStreamInfo.end(); it++) {
5052
5053 camera3_stream_t *stream = (*it)->stream;
5054 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
5055 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
5056 (stream->format == mInputStreamInfo.format)) {
5057 // Usage flag for an input stream and the source output stream
5058 // may be different.
5059 LOGD("Found reprocessible output stream! %p", *it);
5060 LOGD("input stream usage 0x%x, current stream usage 0x%x",
5061 stream->usage, mInputStreamInfo.usage);
5062
5063 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
5064 if (channel != NULL && channel->mStreams[0]) {
5065 id = channel->mStreams[0]->getMyServerID();
5066 return NO_ERROR;
5067 }
5068 }
5069 }
5070 } else {
5071 LOGD("No input stream, so no reprocessible output stream");
5072 }
5073 return NAME_NOT_FOUND;
5074 }
5075
5076 /*===========================================================================
5077 * FUNCTION : lookupFwkName
5078 *
5079 * DESCRIPTION: In case the enum is not same in fwk and backend
 * make sure the parameter is correctly propagated
5081 *
5082 * PARAMETERS :
5083 * @arr : map between the two enums
5084 * @len : len of the map
5085 * @hal_name : name of the hal_parm to map
5086 *
5087 * RETURN : int type of status
5088 * fwk_name -- success
 *              non-zero failure code
5090 *==========================================================================*/
lookupFwkName(const mapType * arr,size_t len,halType hal_name)5091 template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
5092 size_t len, halType hal_name)
5093 {
5094
5095 for (size_t i = 0; i < len; i++) {
5096 if (arr[i].hal_name == hal_name) {
5097 return arr[i].fwk_name;
5098 }
5099 }
5100
5101 /* Not able to find matching framework type is not necessarily
5102 * an error case. This happens when mm-camera supports more attributes
5103 * than the frameworks do */
5104 LOGH("Cannot find matching framework type");
5105 return NAME_NOT_FOUND;
5106 }
5107
5108 /*===========================================================================
5109 * FUNCTION : lookupHalName
5110 *
5111 * DESCRIPTION: In case the enum is not same in fwk and backend
 * make sure the parameter is correctly propagated
5113 *
5114 * PARAMETERS :
5115 * @arr : map between the two enums
5116 * @len : len of the map
5117 * @fwk_name : name of the hal_parm to map
5118 *
5119 * RETURN : int32_t type of status
5120 * hal_name -- success
5121 * none-zero failure code
5122 *==========================================================================*/
lookupHalName(const mapType * arr,size_t len,fwkType fwk_name)5123 template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
5124 size_t len, fwkType fwk_name)
5125 {
5126 for (size_t i = 0; i < len; i++) {
5127 if (arr[i].fwk_name == fwk_name) {
5128 return arr[i].hal_name;
5129 }
5130 }
5131
5132 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
5133 return NAME_NOT_FOUND;
5134 }
5135
5136 /*===========================================================================
5137 * FUNCTION : lookupProp
5138 *
5139 * DESCRIPTION: lookup a value by its name
5140 *
5141 * PARAMETERS :
5142 * @arr : map between the two enums
5143 * @len : size of the map
5144 * @name : name to be looked up
5145 *
5146 * RETURN : Value if found
5147 * CAM_CDS_MODE_MAX if not found
5148 *==========================================================================*/
lookupProp(const mapType * arr,size_t len,const char * name)5149 template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
5150 size_t len, const char *name)
5151 {
5152 if (name) {
5153 for (size_t i = 0; i < len; i++) {
5154 if (!strcmp(arr[i].desc, name)) {
5155 return arr[i].val;
5156 }
5157 }
5158 }
5159 return CAM_CDS_MODE_MAX;
5160 }
5161
5162 /*===========================================================================
 * FUNCTION   : translateFromHalMetadata
5164 * DESCRIPTION:
5165 *
5166 * PARAMETERS :
5167 * @metadata : metadata information from callback
5168 * @pendingRequest: pending request for this metadata
5169 * @pprocDone: whether internal offline postprocsesing is done
5170 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
5171 * in a batch. Always true for non-batch mode.
5172 *
5173 * RETURN : camera_metadata_t*
5174 * metadata in a format specified by fwk
5175 *==========================================================================*/
5176 camera_metadata_t*
translateFromHalMetadata(metadata_buffer_t * metadata,const PendingRequestInfo & pendingRequest,bool pprocDone,bool lastMetadataInBatch)5177 QCamera3HardwareInterface::translateFromHalMetadata(
5178 metadata_buffer_t *metadata,
5179 const PendingRequestInfo& pendingRequest,
5180 bool pprocDone,
5181 bool lastMetadataInBatch)
5182 {
5183 CameraMetadata camMetadata;
5184 camera_metadata_t *resultMetadata;
5185
5186 if (!lastMetadataInBatch) {
5187 /* In batch mode, use empty metadata if this is not the last in batch*/
5188 resultMetadata = allocate_camera_metadata(0, 0);
5189 return resultMetadata;
5190 }
5191
5192 if (pendingRequest.jpegMetadata.entryCount())
5193 camMetadata.append(pendingRequest.jpegMetadata);
5194
5195 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
5196 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
5197 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
5198 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
5199 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
5200 if (mBatchSize == 0) {
5201 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
5202 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
5203 }
5204
5205 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
5206 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
5207 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
5208 // DevCamDebug metadata translateFromHalMetadata AF
5209 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
5210 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
5211 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
5212 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
5213 }
5214 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
5215 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
5216 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
5217 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
5218 }
5219 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
5220 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
5221 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
5222 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
5223 }
5224 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
5225 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
5226 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
5227 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
5228 }
5229 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
5230 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
5231 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
5232 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
5233 }
5234 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
5235 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
5236 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
5237 *DevCamDebug_af_monitor_pdaf_target_pos;
5238 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
5239 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
5240 }
5241 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
5242 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
5243 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
5244 *DevCamDebug_af_monitor_pdaf_confidence;
5245 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
5246 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
5247 }
5248 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
5249 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
5250 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
5251 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
5252 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
5253 }
5254 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
5255 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
5256 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
5257 *DevCamDebug_af_monitor_tof_target_pos;
5258 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
5259 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
5260 }
5261 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
5262 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
5263 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
5264 *DevCamDebug_af_monitor_tof_confidence;
5265 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
5266 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
5267 }
5268 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
5269 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
5270 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
5271 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
5272 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
5273 }
5274 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
5275 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
5276 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
5277 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
5278 &fwk_DevCamDebug_af_monitor_type_select, 1);
5279 }
5280 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
5281 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
5282 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
5283 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
5284 &fwk_DevCamDebug_af_monitor_refocus, 1);
5285 }
5286 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
5287 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
5288 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
5289 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
5290 &fwk_DevCamDebug_af_monitor_target_pos, 1);
5291 }
5292 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
5293 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
5294 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
5295 *DevCamDebug_af_search_pdaf_target_pos;
5296 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
5297 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
5298 }
5299 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
5300 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
5301 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
5302 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
5303 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
5304 }
5305 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
5306 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
5307 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
5308 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
5309 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
5310 }
5311 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
5312 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
5313 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
5314 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
5315 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
5316 }
5317 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
5318 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
5319 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
5320 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
5321 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
5322 }
5323 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
5324 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
5325 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
5326 *DevCamDebug_af_search_tof_target_pos;
5327 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
5328 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
5329 }
5330 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
5331 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
5332 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
5333 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
5334 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
5335 }
5336 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
5337 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
5338 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
5339 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
5340 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
5341 }
5342 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
5343 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
5344 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
5345 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
5346 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
5347 }
5348 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
5349 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
5350 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
5351 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
5352 &fwk_DevCamDebug_af_search_tof_confidence, 1);
5353 }
5354 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
5355 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
5356 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
5357 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
5358 &fwk_DevCamDebug_af_search_type_select, 1);
5359 }
5360 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
5361 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
5362 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
5363 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
5364 &fwk_DevCamDebug_af_search_next_pos, 1);
5365 }
5366 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
5367 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
5368 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
5369 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
5370 &fwk_DevCamDebug_af_search_target_pos, 1);
5371 }
5372 // DevCamDebug metadata translateFromHalMetadata AEC
5373 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
5374 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
5375 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
5376 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
5377 }
5378 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
5379 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
5380 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
5381 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
5382 }
5383 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
5384 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
5385 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
5386 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
5387 }
5388 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
5389 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
5390 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
5391 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
5392 }
5393 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
5394 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
5395 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
5396 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
5397 }
5398 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
5399 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
5400 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
5401 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
5402 }
5403 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
5404 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
5405 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
5406 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
5407 }
5408 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
5409 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
5410 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
5411 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
5412 }
5413 // DevCamDebug metadata translateFromHalMetadata AWB
5414 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
5415 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
5416 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
5417 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
5418 }
5419 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
5420 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
5421 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
5422 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
5423 }
5424 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
5425 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
5426 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
5427 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
5428 }
5429 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
5430 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
5431 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
5432 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
5433 }
5434 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
5435 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
5436 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
5437 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
5438 }
5439 }
5440 // atrace_end(ATRACE_TAG_ALWAYS);
5441
5442 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
5443 int64_t fwk_frame_number = *frame_number;
5444 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
5445 }
5446
5447 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
5448 int32_t fps_range[2];
5449 fps_range[0] = (int32_t)float_range->min_fps;
5450 fps_range[1] = (int32_t)float_range->max_fps;
5451 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
5452 fps_range, 2);
5453 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
5454 fps_range[0], fps_range[1]);
5455 }
5456
5457 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
5458 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
5459 }
5460
5461 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5462 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
5463 METADATA_MAP_SIZE(SCENE_MODES_MAP),
5464 *sceneMode);
5465 if (NAME_NOT_FOUND != val) {
5466 uint8_t fwkSceneMode = (uint8_t)val;
5467 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
5468 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
5469 fwkSceneMode);
5470 }
5471 }
5472
5473 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
5474 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
5475 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
5476 }
5477
5478 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
5479 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
5480 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
5481 }
5482
5483 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
5484 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
5485 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
5486 }
5487
5488 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
5489 CAM_INTF_META_EDGE_MODE, metadata) {
5490 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
5491 }
5492
5493 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
5494 uint8_t fwk_flashPower = (uint8_t) *flashPower;
5495 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
5496 }
5497
5498 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
5499 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
5500 }
5501
5502 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
5503 if (0 <= *flashState) {
5504 uint8_t fwk_flashState = (uint8_t) *flashState;
5505 if (!gCamCapability[mCameraId]->flash_available) {
5506 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
5507 }
5508 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
5509 }
5510 }
5511
5512 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
5513 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
5514 if (NAME_NOT_FOUND != val) {
5515 uint8_t fwk_flashMode = (uint8_t)val;
5516 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
5517 }
5518 }
5519
5520 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
5521 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
5522 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
5523 }
5524
5525 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
5526 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
5527 }
5528
5529 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
5530 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
5531 }
5532
5533 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
5534 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
5535 }
5536
5537 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
5538 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
5539 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
5540 }
5541
5542 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
5543 uint8_t fwk_videoStab = (uint8_t) *videoStab;
5544 LOGD("fwk_videoStab = %d", fwk_videoStab);
5545 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
5546 } else {
        // Regardless of whether video stabilization is supported, CTS expects the EIS
        // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
5549 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
5550 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
5551 LOGD("%s: EIS result default to OFF mode", __func__);
5552 }
5553
5554 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
5555 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
5556 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
5557 }
5558
5559 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
5560 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
5561 }
5562
5563 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
5564 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
5565 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
5566
5567 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
5568 gCamCapability[mCameraId]->color_arrangement);
5569
5570 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
5571 blackLevelAppliedPattern->cam_black_level[0],
5572 blackLevelAppliedPattern->cam_black_level[1],
5573 blackLevelAppliedPattern->cam_black_level[2],
5574 blackLevelAppliedPattern->cam_black_level[3]);
5575 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
5576 BLACK_LEVEL_PATTERN_CNT);
5577
        // Update ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL.
        // Need to convert the internal 12-bit depth to the sensor's 10-bit raw
        // depth space.
5581 fwk_blackLevelInd[0] /= 4.0;
5582 fwk_blackLevelInd[1] /= 4.0;
5583 fwk_blackLevelInd[2] /= 4.0;
5584 fwk_blackLevelInd[3] /= 4.0;
5585 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
5586 BLACK_LEVEL_PATTERN_CNT);
5587 }
5588
5589 // Fixed whitelevel is used by ISP/Sensor
5590 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
5591 &gCamCapability[mCameraId]->white_level, 1);
5592
5593 IF_META_AVAILABLE(cam_eis_crop_info_t, eisCropInfo,
5594 CAM_INTF_META_EIS_CROP_INFO, metadata) {
5595 mLastEISCropInfo = *eisCropInfo;
5596
5597 mCropRegionMapper.toActiveArray(mLastEISCropInfo.delta_x, mLastEISCropInfo.delta_y,
5598 mLastEISCropInfo.delta_width, mLastEISCropInfo.delta_height);
5599 }
5600
5601 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
5602 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
5603 int32_t scalerCropRegion[4];
5604 scalerCropRegion[0] = hScalerCropRegion->left;
5605 scalerCropRegion[1] = hScalerCropRegion->top;
5606 scalerCropRegion[2] = hScalerCropRegion->width;
5607 scalerCropRegion[3] = hScalerCropRegion->height;
5608
5609 // Adjust crop region from sensor output coordinate system to active
5610 // array coordinate system.
5611 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
5612 scalerCropRegion[2], scalerCropRegion[3]);
5613
5614 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
5615 }
5616
5617 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
5618 LOGD("sensorExpTime = %lld", *sensorExpTime);
5619 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
5620 }
5621
5622 IF_META_AVAILABLE(int64_t, sensorFameDuration,
5623 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
5624 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
5625 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
5626 }
5627
5628 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
5629 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
5630 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
5631 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
5632 sensorRollingShutterSkew, 1);
5633 }
5634
5635 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
5636 LOGD("sensorSensitivity = %d", *sensorSensitivity);
5637 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
5638
5639 //calculate the noise profile based on sensitivity
5640 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
5641 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
5642 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
5643 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
5644 noise_profile[i] = noise_profile_S;
5645 noise_profile[i+1] = noise_profile_O;
5646 }
5647 LOGD("noise model entry (S, O) is (%f, %f)",
5648 noise_profile_S, noise_profile_O);
5649 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
5650 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
5651 }
5652
5653 int32_t fwk_ispSensitivity = 100;
5654 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
5655 fwk_ispSensitivity = (int32_t) *ispSensitivity;
5656 }
5657 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
5658 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
5659 }
5660 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
5661
5662 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
5663 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
5664 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
5665 }
5666
5667 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
5668 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
5669 *faceDetectMode);
5670 if (NAME_NOT_FOUND != val) {
5671 uint8_t fwk_faceDetectMode = (uint8_t)val;
5672 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
5673
5674 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
5675 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
5676 CAM_INTF_META_FACE_DETECTION, metadata) {
5677 uint8_t numFaces = MIN(
5678 faceDetectionInfo->num_faces_detected, MAX_ROI);
5679 int32_t faceIds[MAX_ROI];
5680 uint8_t faceScores[MAX_ROI];
5681 int32_t faceRectangles[MAX_ROI * 4];
5682 int32_t faceLandmarks[MAX_ROI * 6];
5683 size_t j = 0, k = 0;
5684
5685 for (size_t i = 0; i < numFaces; i++) {
5686 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
5687 // Adjust crop region from sensor output coordinate system to active
5688 // array coordinate system.
5689 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
5690 mCropRegionMapper.toActiveArray(rect.left, rect.top,
5691 rect.width, rect.height);
5692
5693 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
5694 faceRectangles+j, -1);
5695
5696 j+= 4;
5697 }
5698 if (numFaces <= 0) {
5699 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
5700 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
5701 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
5702 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
5703 }
5704
5705 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
5706 numFaces);
5707 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
5708 faceRectangles, numFaces * 4U);
5709 if (fwk_faceDetectMode ==
5710 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
5711 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
5712 CAM_INTF_META_FACE_LANDMARK, metadata) {
5713
5714 for (size_t i = 0; i < numFaces; i++) {
5715 // Map the co-ordinate sensor output coordinate system to active
5716 // array coordinate system.
5717 mCropRegionMapper.toActiveArray(
5718 landmarks->face_landmarks[i].left_eye_center.x,
5719 landmarks->face_landmarks[i].left_eye_center.y);
5720 mCropRegionMapper.toActiveArray(
5721 landmarks->face_landmarks[i].right_eye_center.x,
5722 landmarks->face_landmarks[i].right_eye_center.y);
5723 mCropRegionMapper.toActiveArray(
5724 landmarks->face_landmarks[i].mouth_center.x,
5725 landmarks->face_landmarks[i].mouth_center.y);
5726
5727 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
5728 k+= 6;
5729 }
5730 }
5731
5732 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
5733 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
5734 faceLandmarks, numFaces * 6U);
5735 }
5736 }
5737 }
5738 }
5739 }
5740
5741 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
5742 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
5743 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
5744 }
5745
5746 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
5747 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
5748 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
5749 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
5750 }
5751
5752 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
5753 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
5754 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
5755 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
5756 }
5757
5758 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
5759 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
5760 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
5761 CAM_MAX_SHADING_MAP_HEIGHT);
5762 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
5763 CAM_MAX_SHADING_MAP_WIDTH);
5764 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
5765 lensShadingMap->lens_shading, 4U * map_width * map_height);
5766 }
5767
5768 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
5769 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
5770 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
5771 }
5772
5773 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
5774 //Populate CAM_INTF_META_TONEMAP_CURVES
5775 /* ch0 = G, ch 1 = B, ch 2 = R*/
5776 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5777 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5778 tonemap->tonemap_points_cnt,
5779 CAM_MAX_TONEMAP_CURVE_SIZE);
5780 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5781 }
5782
5783 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
5784 &tonemap->curves[0].tonemap_points[0][0],
5785 tonemap->tonemap_points_cnt * 2);
5786
5787 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
5788 &tonemap->curves[1].tonemap_points[0][0],
5789 tonemap->tonemap_points_cnt * 2);
5790
5791 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
5792 &tonemap->curves[2].tonemap_points[0][0],
5793 tonemap->tonemap_points_cnt * 2);
5794 }
5795
5796 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
5797 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
5798 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
5799 CC_GAINS_COUNT);
5800 }
5801
5802 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
5803 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
5804 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
5805 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
5806 CC_MATRIX_COLS * CC_MATRIX_ROWS);
5807 }
5808
5809 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
5810 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
5811 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5812 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5813 toneCurve->tonemap_points_cnt,
5814 CAM_MAX_TONEMAP_CURVE_SIZE);
5815 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5816 }
5817 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
5818 (float*)toneCurve->curve.tonemap_points,
5819 toneCurve->tonemap_points_cnt * 2);
5820 }
5821
5822 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
5823 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
5824 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
5825 predColorCorrectionGains->gains, 4);
5826 }
5827
5828 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
5829 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
5830 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
5831 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
5832 CC_MATRIX_ROWS * CC_MATRIX_COLS);
5833 }
5834
5835 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
5836 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
5837 }
5838
5839 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
5840 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
5841 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
5842 }
5843
5844 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
5845 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
5846 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
5847 }
5848
5849 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
5850 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
5851 *effectMode);
5852 if (NAME_NOT_FOUND != val) {
5853 uint8_t fwk_effectMode = (uint8_t)val;
5854 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
5855 }
5856 }
5857
5858 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
5859 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
5860 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
5861 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
5862 if (NAME_NOT_FOUND != fwk_testPatternMode) {
5863 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
5864 }
5865 int32_t fwk_testPatternData[4];
5866 fwk_testPatternData[0] = testPatternData->r;
5867 fwk_testPatternData[3] = testPatternData->b;
5868 switch (gCamCapability[mCameraId]->color_arrangement) {
5869 case CAM_FILTER_ARRANGEMENT_RGGB:
5870 case CAM_FILTER_ARRANGEMENT_GRBG:
5871 fwk_testPatternData[1] = testPatternData->gr;
5872 fwk_testPatternData[2] = testPatternData->gb;
5873 break;
5874 case CAM_FILTER_ARRANGEMENT_GBRG:
5875 case CAM_FILTER_ARRANGEMENT_BGGR:
5876 fwk_testPatternData[2] = testPatternData->gr;
5877 fwk_testPatternData[1] = testPatternData->gb;
5878 break;
5879 default:
5880 LOGE("color arrangement %d is not supported",
5881 gCamCapability[mCameraId]->color_arrangement);
5882 break;
5883 }
5884 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
5885 }
5886
5887 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
5888 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
5889 }
5890
5891 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
5892 String8 str((const char *)gps_methods);
5893 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
5894 }
5895
5896 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
5897 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
5898 }
5899
5900 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
5901 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
5902 }
5903
5904 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
5905 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
5906 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
5907 }
5908
5909 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
5910 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
5911 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
5912 }
5913
5914 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
5915 int32_t fwk_thumb_size[2];
5916 fwk_thumb_size[0] = thumb_size->width;
5917 fwk_thumb_size[1] = thumb_size->height;
5918 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
5919 }
5920
5921 // Skip reprocess metadata for high speed mode.
5922 if (mBatchSize == 0) {
5923 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
5924 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
5925 privateData,
5926 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
5927 }
5928 }
5929
5930 if (metadata->is_tuning_params_valid) {
5931 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
5932 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
5933 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
5934
5935
5936 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
5937 sizeof(uint32_t));
5938 data += sizeof(uint32_t);
5939
5940 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
5941 sizeof(uint32_t));
5942 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5943 data += sizeof(uint32_t);
5944
5945 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
5946 sizeof(uint32_t));
5947 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5948 data += sizeof(uint32_t);
5949
5950 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
5951 sizeof(uint32_t));
5952 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5953 data += sizeof(uint32_t);
5954
5955 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
5956 sizeof(uint32_t));
5957 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5958 data += sizeof(uint32_t);
5959
5960 metadata->tuning_params.tuning_mod3_data_size = 0;
5961 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
5962 sizeof(uint32_t));
5963 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5964 data += sizeof(uint32_t);
5965
5966 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
5967 TUNING_SENSOR_DATA_MAX);
5968 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
5969 count);
5970 data += count;
5971
5972 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
5973 TUNING_VFE_DATA_MAX);
5974 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
5975 count);
5976 data += count;
5977
5978 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
5979 TUNING_CPP_DATA_MAX);
5980 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
5981 count);
5982 data += count;
5983
5984 count = MIN(metadata->tuning_params.tuning_cac_data_size,
5985 TUNING_CAC_DATA_MAX);
5986 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
5987 count);
5988 data += count;
5989
5990 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
5991 (int32_t *)(void *)tuning_meta_data_blob,
5992 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
5993 }
5994
5995 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
5996 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
5997 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
5998 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
5999 NEUTRAL_COL_POINTS);
6000 }
6001
6002 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
6003 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
6004 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
6005 }
6006
6007 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
6008 int32_t aeRegions[REGIONS_TUPLE_COUNT];
6009 // Adjust crop region from sensor output coordinate system to active
6010 // array coordinate system.
6011 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
6012 hAeRegions->rect.width, hAeRegions->rect.height);
6013
6014 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
6015 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
6016 REGIONS_TUPLE_COUNT);
6017 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6018 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
6019 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
6020 hAeRegions->rect.height);
6021 }
6022
6023 if (!pendingRequest.focusStateSent) {
6024 if (pendingRequest.focusStateValid) {
6025 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
6026 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
6027 } else {
6028 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
6029 uint8_t fwk_afState = (uint8_t) *afState;
6030 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
6031 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
6032 }
6033 }
6034 }
6035
6036 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
6037 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
6038 }
6039
6040 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
6041 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
6042 }
6043
6044 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
6045 uint8_t fwk_lensState = *lensState;
6046 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
6047 }
6048
6049 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
6050 /*af regions*/
6051 int32_t afRegions[REGIONS_TUPLE_COUNT];
6052 // Adjust crop region from sensor output coordinate system to active
6053 // array coordinate system.
6054 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
6055 hAfRegions->rect.width, hAfRegions->rect.height);
6056
6057 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
6058 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
6059 REGIONS_TUPLE_COUNT);
6060 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6061 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
6062 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
6063 hAfRegions->rect.height);
6064 }
6065
6066 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
6067 uint32_t ab_mode = *hal_ab_mode;
6068 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
6069 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
6070 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
6071 }
6072 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
6073 ab_mode);
6074 if (NAME_NOT_FOUND != val) {
6075 uint8_t fwk_ab_mode = (uint8_t)val;
6076 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
6077 }
6078 }
6079
6080 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6081 int val = lookupFwkName(SCENE_MODES_MAP,
6082 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
6083 if (NAME_NOT_FOUND != val) {
6084 uint8_t fwkBestshotMode = (uint8_t)val;
6085 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
6086 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
6087 } else {
6088 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
6089 }
6090 }
6091
6092 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
6093 uint8_t fwk_mode = (uint8_t) *mode;
6094 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
6095 }
6096
6097 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6098 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6099
6100 int32_t hotPixelMap[2];
6101 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
6102
6103 // CDS
6104 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
6105 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
6106 }
6107
6108 // TNR
6109 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
6110 uint8_t tnr_enable = tnr->denoise_enable;
6111 int32_t tnr_process_type = (int32_t)tnr->process_plates;
6112
6113 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
6114 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
6115 }
6116
6117 // Reprocess crop data
6118 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
6119 uint8_t cnt = crop_data->num_of_streams;
6120 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
6121 // mm-qcamera-daemon only posts crop_data for streams
6122 // not linked to pproc. So no valid crop metadata is not
6123 // necessarily an error case.
6124 LOGD("No valid crop metadata entries");
6125 } else {
6126 uint32_t reproc_stream_id;
6127 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6128 LOGD("No reprocessible stream found, ignore crop data");
6129 } else {
6130 int rc = NO_ERROR;
6131 Vector<int32_t> roi_map;
6132 int32_t *crop = new int32_t[cnt*4];
6133 if (NULL == crop) {
6134 rc = NO_MEMORY;
6135 }
6136 if (NO_ERROR == rc) {
6137 int32_t streams_found = 0;
6138 for (size_t i = 0; i < cnt; i++) {
6139 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
6140 if (pprocDone) {
6141 // HAL already does internal reprocessing,
6142 // either via reprocessing before JPEG encoding,
6143 // or offline postprocessing for pproc bypass case.
6144 crop[0] = 0;
6145 crop[1] = 0;
6146 crop[2] = mInputStreamInfo.dim.width;
6147 crop[3] = mInputStreamInfo.dim.height;
6148 } else {
6149 crop[0] = crop_data->crop_info[i].crop.left;
6150 crop[1] = crop_data->crop_info[i].crop.top;
6151 crop[2] = crop_data->crop_info[i].crop.width;
6152 crop[3] = crop_data->crop_info[i].crop.height;
6153 }
6154 roi_map.add(crop_data->crop_info[i].roi_map.left);
6155 roi_map.add(crop_data->crop_info[i].roi_map.top);
6156 roi_map.add(crop_data->crop_info[i].roi_map.width);
6157 roi_map.add(crop_data->crop_info[i].roi_map.height);
6158 streams_found++;
6159 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
6160 crop[0], crop[1], crop[2], crop[3]);
6161 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
6162 crop_data->crop_info[i].roi_map.left,
6163 crop_data->crop_info[i].roi_map.top,
6164 crop_data->crop_info[i].roi_map.width,
6165 crop_data->crop_info[i].roi_map.height);
6166 break;
6167
6168 }
6169 }
6170 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
6171 &streams_found, 1);
6172 camMetadata.update(QCAMERA3_CROP_REPROCESS,
6173 crop, (size_t)(streams_found * 4));
6174 if (roi_map.array()) {
6175 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
6176 roi_map.array(), roi_map.size());
6177 }
6178 }
6179 if (crop) {
6180 delete [] crop;
6181 }
6182 }
6183 }
6184 }
6185
6186 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
6187 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
6188 // so hardcoding the CAC result to OFF mode.
6189 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
6190 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
6191 } else {
6192 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
6193 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6194 *cacMode);
6195 if (NAME_NOT_FOUND != val) {
6196 uint8_t resultCacMode = (uint8_t)val;
6197 // check whether CAC result from CB is equal to Framework set CAC mode
6198 // If not equal then set the CAC mode came in corresponding request
6199 if (pendingRequest.fwkCacMode != resultCacMode) {
6200 resultCacMode = pendingRequest.fwkCacMode;
6201 }
6202 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
6203 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
6204 } else {
6205 LOGE("Invalid CAC camera parameter: %d", *cacMode);
6206 }
6207 }
6208 }
6209
6210 // Post blob of cam_cds_data through vendor tag.
6211 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
6212 uint8_t cnt = cdsInfo->num_of_streams;
6213 cam_cds_data_t cdsDataOverride;
6214 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
6215 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
6216 cdsDataOverride.num_of_streams = 1;
6217 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
6218 uint32_t reproc_stream_id;
6219 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6220 LOGD("No reprocessible stream found, ignore cds data");
6221 } else {
6222 for (size_t i = 0; i < cnt; i++) {
6223 if (cdsInfo->cds_info[i].stream_id ==
6224 reproc_stream_id) {
6225 cdsDataOverride.cds_info[0].cds_enable =
6226 cdsInfo->cds_info[i].cds_enable;
6227 break;
6228 }
6229 }
6230 }
6231 } else {
6232 LOGD("Invalid stream count %d in CDS_DATA", cnt);
6233 }
6234 camMetadata.update(QCAMERA3_CDS_INFO,
6235 (uint8_t *)&cdsDataOverride,
6236 sizeof(cam_cds_data_t));
6237 }
6238
6239 // Ldaf calibration data
6240 if (!mLdafCalibExist) {
6241 IF_META_AVAILABLE(uint32_t, ldafCalib,
6242 CAM_INTF_META_LDAF_EXIF, metadata) {
6243 mLdafCalibExist = true;
6244 mLdafCalib[0] = ldafCalib[0];
6245 mLdafCalib[1] = ldafCalib[1];
6246 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
6247 ldafCalib[0], ldafCalib[1]);
6248 }
6249 }
6250
6251 // AF scene change
6252 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
6253 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
6254 camMetadata.update(ANDROID_CONTROL_AF_SCENE_CHANGE, afSceneChange, 1);
6255 } else {
6256 uint8_t noSceneChange = 0;
6257 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, &noSceneChange, 1);
6258 camMetadata.update(ANDROID_CONTROL_AF_SCENE_CHANGE, &noSceneChange, 1);
6259 LOGE("Missing AF_SCENE_CHANGE metadata!");
6260 }
6261
6262 resultMetadata = camMetadata.release();
6263 return resultMetadata;
6264 }
6265
6266 /*===========================================================================
6267 * FUNCTION : saveExifParams
6268 *
 * DESCRIPTION: Caches the per-module EXIF debug parameters (AE/AWB/AF/ASD/
 *              stats/BE-stats/bhist/3A-tuning) from the metadata callback
 *              into mExifParams.debug_params.
6270 *
6271 * PARAMETERS :
6272 * @metadata : metadata information from callback
6273 *
6274 * RETURN : none
6275 *
6276 *==========================================================================*/
saveExifParams(metadata_buffer_t * metadata)6277 void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
6278 {
6279 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
6280 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
6281 if (mExifParams.debug_params) {
6282 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
6283 mExifParams.debug_params->ae_debug_params_valid = TRUE;
6284 }
6285 }
6286 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
6287 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
6288 if (mExifParams.debug_params) {
6289 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
6290 mExifParams.debug_params->awb_debug_params_valid = TRUE;
6291 }
6292 }
6293 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
6294 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
6295 if (mExifParams.debug_params) {
6296 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
6297 mExifParams.debug_params->af_debug_params_valid = TRUE;
6298 }
6299 }
6300 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
6301 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
6302 if (mExifParams.debug_params) {
6303 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
6304 mExifParams.debug_params->asd_debug_params_valid = TRUE;
6305 }
6306 }
6307 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
6308 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
6309 if (mExifParams.debug_params) {
6310 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
6311 mExifParams.debug_params->stats_debug_params_valid = TRUE;
6312 }
6313 }
6314 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
6315 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
6316 if (mExifParams.debug_params) {
6317 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
6318 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
6319 }
6320 }
6321 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
6322 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
6323 if (mExifParams.debug_params) {
6324 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
6325 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
6326 }
6327 }
6328 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
6329 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
6330 if (mExifParams.debug_params) {
6331 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
6332 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
6333 }
6334 }
6335 }
6336
6337 /*===========================================================================
6338 * FUNCTION : get3AExifParams
6339 *
 * DESCRIPTION: Returns a copy of the cached 3A EXIF parameters (mExifParams).
6341 *
6342 * PARAMETERS : none
6343 *
6344 *
6345 * RETURN : mm_jpeg_exif_params_t
6346 *
6347 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Return a copy of the cached EXIF parameters; the debug_params
    // sections are populated by saveExifParams() from metadata callbacks.
    return mExifParams;
}
6352
6353 /*===========================================================================
6354 * FUNCTION : translateCbUrgentMetadataToResultMetadata
6355 *
 * DESCRIPTION: Translates urgent (partial-result) metadata from the HAL
 *              callback -- AWB/AE/AF states, triggers and modes -- into
 *              framework result metadata.
6357 *
6358 * PARAMETERS :
6359 * @metadata : metadata information from callback
6360 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
6361 * urgent metadata in a batch. Always true for
6362 * non-batch mode.
6363 * @frame_number : frame number for this urgent metadata
6364 *
6365 * RETURN : camera_metadata_t*
6366 * metadata in a format specified by fwk
6367 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
                                 uint32_t frame_number)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;

    if (!lastUrgentMetadataInBatch) {
        /* In batch mode, use empty metadata if this is not the last in batch
         */
        resultMetadata = allocate_camera_metadata(0, 0);
        return resultMetadata;
    }

    // AWB state reported by the HAL, forwarded as-is (narrowed to u8).
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger and its id, echoed back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // Map the HAL focus mode enum to the framework AF mode via the
    // FOCUS_MODES_MAP lookup table.
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
                    val);
        }
    }

    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);

        IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
            // Remember the latest trigger; it is replayed below for every
            // result even when this metadata lacks a trigger of its own.
            mAfTrigger = *af_trigger;
            uint32_t fwk_AfState = (uint32_t) *afState;

            // If this is the result for a new trigger, check if there is new early
            // af state. If there is, use the last af state for all results
            // preceding current partial frame number.
            for (auto & pendingRequest : mPendingRequestsList) {
                if (pendingRequest.frame_number < frame_number) {
                    // Earlier pending requests inherit the current AF state.
                    pendingRequest.focusStateValid = true;
                    pendingRequest.focusState = fwk_AfState;
                } else if (pendingRequest.frame_number == frame_number) {
                    IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
                        // Check if early AF state for trigger exists. If yes, send AF state as
                        // partial result for better latency.
                        uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
                        pendingRequest.focusStateSent = true;
                        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
                        LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
                                 frame_number, fwkEarlyAfState);
                    }
                }
            }
        }
    }
    // Always report the cached AF trigger/id (mAfTrigger), even for results
    // whose metadata carried no trigger entry.
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
            &mAfTrigger.trigger, 1);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);

    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // Derive ANDROID_CONTROL_AE_MODE from redeye, LED flash mode and AE
    // mode, checked in that priority order below.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
        fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // No tag update in this case: AE mode is left absent from the result.
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }

    resultMetadata = camMetadata.release();
    return resultMetadata;
}
6509
6510 /*===========================================================================
6511 * FUNCTION : dumpMetadataToFile
6512 *
6513 * DESCRIPTION: Dumps tuning metadata to file system
6514 *
6515 * PARAMETERS :
6516 * @meta : tuning metadata
6517 * @dumpFrameCount : current dump frame count
6518 * @enabled : Enable mask
6519 *
6520 *==========================================================================*/
dumpMetadataToFile(tuning_params_t & meta,uint32_t & dumpFrameCount,bool enabled,const char * type,uint32_t frameNumber)6521 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
6522 uint32_t &dumpFrameCount,
6523 bool enabled,
6524 const char *type,
6525 uint32_t frameNumber)
6526 {
6527 //Some sanity checks
6528 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
6529 LOGE("Tuning sensor data size bigger than expected %d: %d",
6530 meta.tuning_sensor_data_size,
6531 TUNING_SENSOR_DATA_MAX);
6532 return;
6533 }
6534
6535 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
6536 LOGE("Tuning VFE data size bigger than expected %d: %d",
6537 meta.tuning_vfe_data_size,
6538 TUNING_VFE_DATA_MAX);
6539 return;
6540 }
6541
6542 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
6543 LOGE("Tuning CPP data size bigger than expected %d: %d",
6544 meta.tuning_cpp_data_size,
6545 TUNING_CPP_DATA_MAX);
6546 return;
6547 }
6548
6549 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
6550 LOGE("Tuning CAC data size bigger than expected %d: %d",
6551 meta.tuning_cac_data_size,
6552 TUNING_CAC_DATA_MAX);
6553 return;
6554 }
6555 //
6556
6557 if(enabled){
6558 char timeBuf[FILENAME_MAX];
6559 char buf[FILENAME_MAX];
6560 memset(buf, 0, sizeof(buf));
6561 memset(timeBuf, 0, sizeof(timeBuf));
6562 time_t current_time;
6563 struct tm * timeinfo;
6564 time (¤t_time);
6565 timeinfo = localtime (¤t_time);
6566 if (timeinfo != NULL) {
6567 /* Consistent naming for Jpeg+meta+raw: meta name */
6568 strftime (timeBuf, sizeof(timeBuf),
6569 QCAMERA_DUMP_FRM_LOCATION"IMG_%Y%m%d_%H%M%S", timeinfo);
6570 /* Consistent naming for Jpeg+meta+raw: meta name end*/
6571 }
6572 String8 filePath(timeBuf);
6573 /* Consistent naming for Jpeg+meta+raw */
6574 snprintf(buf,
6575 sizeof(buf),
6576 "%dm_%s_%d.bin",
6577 dumpFrameCount,
6578 type,
6579 frameNumber);
6580 /* Consistent naming for Jpeg+meta+raw end */
6581 filePath.append(buf);
6582 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
6583 if (file_fd >= 0) {
6584 ssize_t written_len = 0;
6585 meta.tuning_data_version = TUNING_DATA_VERSION;
6586 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
6587 written_len += write(file_fd, data, sizeof(uint32_t));
6588 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
6589 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6590 written_len += write(file_fd, data, sizeof(uint32_t));
6591 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
6592 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6593 written_len += write(file_fd, data, sizeof(uint32_t));
6594 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
6595 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6596 written_len += write(file_fd, data, sizeof(uint32_t));
6597 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
6598 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6599 written_len += write(file_fd, data, sizeof(uint32_t));
6600 meta.tuning_mod3_data_size = 0;
6601 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
6602 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6603 written_len += write(file_fd, data, sizeof(uint32_t));
6604 size_t total_size = meta.tuning_sensor_data_size;
6605 data = (void *)((uint8_t *)&meta.data);
6606 written_len += write(file_fd, data, total_size);
6607 total_size = meta.tuning_vfe_data_size;
6608 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
6609 written_len += write(file_fd, data, total_size);
6610 total_size = meta.tuning_cpp_data_size;
6611 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
6612 written_len += write(file_fd, data, total_size);
6613 total_size = meta.tuning_cac_data_size;
6614 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
6615 written_len += write(file_fd, data, total_size);
6616 close(file_fd);
6617 }else {
6618 LOGE("fail to open file for metadata dumping");
6619 }
6620 }
6621 }
6622
6623 /*===========================================================================
6624 * FUNCTION : cleanAndSortStreamInfo
6625 *
6626 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
6627 * and sort them such that raw stream is at the end of the list
6628 * This is a workaround for camera daemon constraint.
6629 *
6630 * PARAMETERS : None
6631 *
6632 *==========================================================================*/
cleanAndSortStreamInfo()6633 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
6634 {
6635 List<stream_info_t *> newStreamInfo;
6636
6637 /*clean up invalid streams*/
6638 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
6639 it != mStreamInfo.end();) {
6640 if(((*it)->status) == INVALID){
6641 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
6642 delete channel;
6643 free(*it);
6644 it = mStreamInfo.erase(it);
6645 } else {
6646 it++;
6647 }
6648 }
6649
6650 // Move preview/video/callback/snapshot streams into newList
6651 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6652 it != mStreamInfo.end();) {
6653 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
6654 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
6655 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
6656 newStreamInfo.push_back(*it);
6657 it = mStreamInfo.erase(it);
6658 } else
6659 it++;
6660 }
6661 // Move raw streams into newList
6662 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6663 it != mStreamInfo.end();) {
6664 newStreamInfo.push_back(*it);
6665 it = mStreamInfo.erase(it);
6666 }
6667
6668 mStreamInfo = newStreamInfo;
6669 }
6670
6671 /*===========================================================================
6672 * FUNCTION : extractJpegMetadata
6673 *
6674 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
6675 * JPEG metadata is cached in HAL, and return as part of capture
6676 * result when metadata is returned from camera daemon.
6677 *
6678 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
6679 * @request: capture request
6680 *
6681 *==========================================================================*/
extractJpegMetadata(CameraMetadata & jpegMetadata,const camera3_capture_request_t * request)6682 void QCamera3HardwareInterface::extractJpegMetadata(
6683 CameraMetadata& jpegMetadata,
6684 const camera3_capture_request_t *request)
6685 {
6686 CameraMetadata frame_settings;
6687 frame_settings = request->settings;
6688
6689 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
6690 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
6691 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
6692 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
6693
6694 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
6695 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
6696 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
6697 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
6698
6699 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
6700 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
6701 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
6702 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
6703
6704 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
6705 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
6706 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
6707 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
6708
6709 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
6710 jpegMetadata.update(ANDROID_JPEG_QUALITY,
6711 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
6712 frame_settings.find(ANDROID_JPEG_QUALITY).count);
6713
6714 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
6715 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
6716 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
6717 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
6718
6719 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
6720 int32_t thumbnail_size[2];
6721 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
6722 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
6723 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
6724 int32_t orientation =
6725 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
6726 if ((orientation == 90) || (orientation == 270)) {
6727 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
6728 int32_t temp;
6729 temp = thumbnail_size[0];
6730 thumbnail_size[0] = thumbnail_size[1];
6731 thumbnail_size[1] = temp;
6732 }
6733 }
6734 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
6735 thumbnail_size,
6736 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
6737 }
6738
6739 }
6740
6741 /*===========================================================================
6742 * FUNCTION : convertToRegions
6743 *
6744 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
6745 *
6746 * PARAMETERS :
6747 * @rect : cam_rect_t struct to convert
6748 * @region : int32_t destination array
6749 * @weight : if we are converting from cam_area_t, weight is valid
6750 * else weight = -1
6751 *
6752 *==========================================================================*/
convertToRegions(cam_rect_t rect,int32_t * region,int weight)6753 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
6754 int32_t *region, int weight)
6755 {
6756 region[0] = rect.left;
6757 region[1] = rect.top;
6758 region[2] = rect.left + rect.width;
6759 region[3] = rect.top + rect.height;
6760 if (weight > -1) {
6761 region[4] = weight;
6762 }
6763 }
6764
6765 /*===========================================================================
6766 * FUNCTION : convertFromRegions
6767 *
6768 * DESCRIPTION: helper method to convert from array to cam_rect_t
6769 *
6770 * PARAMETERS :
6771 * @rect : cam_rect_t struct to convert
6772 * @region : int32_t destination array
6773 * @weight : if we are converting from cam_area_t, weight is valid
6774 * else weight = -1
6775 *
6776 *==========================================================================*/
convertFromRegions(cam_area_t & roi,const camera_metadata_t * settings,uint32_t tag)6777 void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
6778 const camera_metadata_t *settings, uint32_t tag)
6779 {
6780 CameraMetadata frame_settings;
6781 frame_settings = settings;
6782 int32_t x_min = frame_settings.find(tag).data.i32[0];
6783 int32_t y_min = frame_settings.find(tag).data.i32[1];
6784 int32_t x_max = frame_settings.find(tag).data.i32[2];
6785 int32_t y_max = frame_settings.find(tag).data.i32[3];
6786 roi.weight = frame_settings.find(tag).data.i32[4];
6787 roi.rect.left = x_min;
6788 roi.rect.top = y_min;
6789 roi.rect.width = x_max - x_min;
6790 roi.rect.height = y_max - y_min;
6791 }
6792
6793 /*===========================================================================
6794 * FUNCTION : resetIfNeededROI
6795 *
6796 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
6797 * crop region
6798 *
6799 * PARAMETERS :
6800 * @roi : cam_area_t struct to resize
6801 * @scalerCropRegion : cam_crop_region_t region to compare against
6802 *
6803 *
6804 *==========================================================================*/
resetIfNeededROI(cam_area_t * roi,const cam_crop_region_t * scalerCropRegion)6805 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
6806 const cam_crop_region_t* scalerCropRegion)
6807 {
6808 int32_t roi_x_max = roi->rect.width + roi->rect.left;
6809 int32_t roi_y_max = roi->rect.height + roi->rect.top;
6810 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
6811 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
6812
6813 /* According to spec weight = 0 is used to indicate roi needs to be disabled
6814 * without having this check the calculations below to validate if the roi
6815 * is inside scalar crop region will fail resulting in the roi not being
6816 * reset causing algorithm to continue to use stale roi window
6817 */
6818 if (roi->weight == 0) {
6819 return true;
6820 }
6821
6822 if ((roi_x_max < scalerCropRegion->left) ||
6823 // right edge of roi window is left of scalar crop's left edge
6824 (roi_y_max < scalerCropRegion->top) ||
6825 // bottom edge of roi window is above scalar crop's top edge
6826 (roi->rect.left > crop_x_max) ||
6827 // left edge of roi window is beyond(right) of scalar crop's right edge
6828 (roi->rect.top > crop_y_max)){
6829 // top edge of roi windo is above scalar crop's top edge
6830 return false;
6831 }
6832 if (roi->rect.left < scalerCropRegion->left) {
6833 roi->rect.left = scalerCropRegion->left;
6834 }
6835 if (roi->rect.top < scalerCropRegion->top) {
6836 roi->rect.top = scalerCropRegion->top;
6837 }
6838 if (roi_x_max > crop_x_max) {
6839 roi_x_max = crop_x_max;
6840 }
6841 if (roi_y_max > crop_y_max) {
6842 roi_y_max = crop_y_max;
6843 }
6844 roi->rect.width = roi_x_max - roi->rect.left;
6845 roi->rect.height = roi_y_max - roi->rect.top;
6846 return true;
6847 }
6848
6849 /*===========================================================================
6850 * FUNCTION : convertLandmarks
6851 *
6852 * DESCRIPTION: helper method to extract the landmarks from face detection info
6853 *
6854 * PARAMETERS :
6855 * @landmark_data : input landmark data to be converted
6856 * @landmarks : int32_t destination array
6857 *
6858 *
6859 *==========================================================================*/
convertLandmarks(cam_face_landmarks_info_t landmark_data,int32_t * landmarks)6860 void QCamera3HardwareInterface::convertLandmarks(
6861 cam_face_landmarks_info_t landmark_data,
6862 int32_t *landmarks)
6863 {
6864 landmarks[0] = (int32_t)landmark_data.left_eye_center.x;
6865 landmarks[1] = (int32_t)landmark_data.left_eye_center.y;
6866 landmarks[2] = (int32_t)landmark_data.right_eye_center.x;
6867 landmarks[3] = (int32_t)landmark_data.right_eye_center.y;
6868 landmarks[4] = (int32_t)landmark_data.mouth_center.x;
6869 landmarks[5] = (int32_t)landmark_data.mouth_center.y;
6870 }
6871
// Shorthand for the mapped pointer of a heap-memory object's buffer at INDEX.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
6873 /*===========================================================================
6874 * FUNCTION : initCapabilities
6875 *
6876 * DESCRIPTION: initialize camera capabilities in static data struct
6877 *
6878 * PARAMETERS :
6879 * @cameraId : camera Id
6880 *
6881 * RETURN : int32_t type of status
6882 * NO_ERROR -- success
6883 * none-zero failure code
6884 *==========================================================================*/
initCapabilities(uint32_t cameraId)6885 int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
6886 {
6887 int rc = 0;
6888 mm_camera_vtbl_t *cameraHandle = NULL;
6889 QCamera3HeapMemory *capabilityHeap = NULL;
6890
6891 rc = camera_open((uint8_t)cameraId, &cameraHandle);
6892 if (rc) {
6893 LOGE("camera_open failed. rc = %d", rc);
6894 goto open_failed;
6895 }
6896 if (!cameraHandle) {
6897 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
6898 goto open_failed;
6899 }
6900
6901 capabilityHeap = new QCamera3HeapMemory(1);
6902 if (capabilityHeap == NULL) {
6903 LOGE("creation of capabilityHeap failed");
6904 goto heap_creation_failed;
6905 }
6906 /* Allocate memory for capability buffer */
6907 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
6908 if(rc != OK) {
6909 LOGE("No memory for cappability");
6910 goto allocate_failed;
6911 }
6912
6913 /* Map memory for capability buffer */
6914 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
6915 rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
6916 CAM_MAPPING_BUF_TYPE_CAPABILITY,
6917 capabilityHeap->getFd(0),
6918 sizeof(cam_capability_t),
6919 capabilityHeap->getPtr(0));
6920 if(rc < 0) {
6921 LOGE("failed to map capability buffer");
6922 goto map_failed;
6923 }
6924
6925 /* Query Capability */
6926 rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
6927 if(rc < 0) {
6928 LOGE("failed to query capability");
6929 goto query_failed;
6930 }
6931 gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
6932 if (!gCamCapability[cameraId]) {
6933 LOGE("out of memory");
6934 goto query_failed;
6935 }
6936 memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
6937 sizeof(cam_capability_t));
6938
6939 int index;
6940 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
6941 cam_analysis_info_t *p_analysis_info =
6942 &gCamCapability[cameraId]->analysis_info[index];
6943 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
6944 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
6945 }
6946 rc = 0;
6947
6948 query_failed:
6949 cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
6950 CAM_MAPPING_BUF_TYPE_CAPABILITY);
6951 map_failed:
6952 capabilityHeap->deallocate();
6953 allocate_failed:
6954 delete capabilityHeap;
6955 heap_creation_failed:
6956 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
6957 cameraHandle = NULL;
6958 open_failed:
6959 return rc;
6960 }
6961
6962 /*==========================================================================
6963 * FUNCTION : get3Aversion
6964 *
6965 * DESCRIPTION: get the Q3A S/W version
6966 *
6967 * PARAMETERS :
6968 * @sw_version: Reference of Q3A structure which will hold version info upon
6969 * return
6970 *
6971 * RETURN : None
6972 *
6973 *==========================================================================*/
get3AVersion(cam_q3a_version_t & sw_version)6974 void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
6975 {
6976 if(gCamCapability[mCameraId])
6977 sw_version = gCamCapability[mCameraId]->q3a_version;
6978 else
6979 LOGE("Capability structure NULL!");
6980 }
6981
6982
6983 /*===========================================================================
6984 * FUNCTION : initParameters
6985 *
6986 * DESCRIPTION: initialize camera parameters
6987 *
6988 * PARAMETERS :
6989 *
6990 * RETURN : int32_t type of status
6991 * NO_ERROR -- success
6992 * none-zero failure code
6993 *==========================================================================*/
initParameters()6994 int QCamera3HardwareInterface::initParameters()
6995 {
6996 int rc = 0;
6997
6998 //Allocate Set Param Buffer
6999 mParamHeap = new QCamera3HeapMemory(1);
7000 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
7001 if(rc != OK) {
7002 rc = NO_MEMORY;
7003 LOGE("Failed to allocate SETPARM Heap memory");
7004 delete mParamHeap;
7005 mParamHeap = NULL;
7006 return rc;
7007 }
7008
7009 //Map memory for parameters buffer
7010 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
7011 CAM_MAPPING_BUF_TYPE_PARM_BUF,
7012 mParamHeap->getFd(0),
7013 sizeof(metadata_buffer_t),
7014 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
7015 if(rc < 0) {
7016 LOGE("failed to map SETPARM buffer");
7017 rc = FAILED_TRANSACTION;
7018 mParamHeap->deallocate();
7019 delete mParamHeap;
7020 mParamHeap = NULL;
7021 return rc;
7022 }
7023
7024 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
7025
7026 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
7027 return rc;
7028 }
7029
7030 /*===========================================================================
7031 * FUNCTION : deinitParameters
7032 *
7033 * DESCRIPTION: de-initialize camera parameters
7034 *
7035 * PARAMETERS :
7036 *
7037 * RETURN : NONE
7038 *==========================================================================*/
deinitParameters()7039 void QCamera3HardwareInterface::deinitParameters()
7040 {
7041 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
7042 CAM_MAPPING_BUF_TYPE_PARM_BUF);
7043
7044 mParamHeap->deallocate();
7045 delete mParamHeap;
7046 mParamHeap = NULL;
7047
7048 mParameters = NULL;
7049
7050 free(mPrevParameters);
7051 mPrevParameters = NULL;
7052 }
7053
7054 /*===========================================================================
7055 * FUNCTION : calcMaxJpegSize
7056 *
7057 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
7058 *
7059 * PARAMETERS :
7060 *
7061 * RETURN : max_jpeg_size
7062 *==========================================================================*/
calcMaxJpegSize(uint32_t camera_id)7063 size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
7064 {
7065 size_t max_jpeg_size = 0;
7066 size_t temp_width, temp_height;
7067 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
7068 MAX_SIZES_CNT);
7069 for (size_t i = 0; i < count; i++) {
7070 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
7071 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
7072 if (temp_width * temp_height > max_jpeg_size ) {
7073 max_jpeg_size = temp_width * temp_height;
7074 }
7075 }
7076 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
7077 return max_jpeg_size;
7078 }
7079
7080 /*===========================================================================
7081 * FUNCTION : getMaxRawSize
7082 *
7083 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
7084 *
7085 * PARAMETERS :
7086 *
7087 * RETURN : Largest supported Raw Dimension
7088 *==========================================================================*/
getMaxRawSize(uint32_t camera_id)7089 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
7090 {
7091 int max_width = 0;
7092 cam_dimension_t maxRawSize;
7093
7094 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
7095 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
7096 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
7097 max_width = gCamCapability[camera_id]->raw_dim[i].width;
7098 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
7099 }
7100 }
7101 return maxRawSize;
7102 }
7103
7104
7105 /*===========================================================================
7106 * FUNCTION : calcMaxJpegDim
7107 *
7108 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
7109 *
7110 * PARAMETERS :
7111 *
7112 * RETURN : max_jpeg_dim
7113 *==========================================================================*/
calcMaxJpegDim()7114 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
7115 {
7116 cam_dimension_t max_jpeg_dim;
7117 cam_dimension_t curr_jpeg_dim;
7118 max_jpeg_dim.width = 0;
7119 max_jpeg_dim.height = 0;
7120 curr_jpeg_dim.width = 0;
7121 curr_jpeg_dim.height = 0;
7122 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
7123 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
7124 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
7125 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
7126 max_jpeg_dim.width * max_jpeg_dim.height ) {
7127 max_jpeg_dim.width = curr_jpeg_dim.width;
7128 max_jpeg_dim.height = curr_jpeg_dim.height;
7129 }
7130 }
7131 return max_jpeg_dim;
7132 }
7133
7134 /*===========================================================================
7135 * FUNCTION : addStreamConfig
7136 *
7137 * DESCRIPTION: adds the stream configuration to the array
7138 *
7139 * PARAMETERS :
7140 * @available_stream_configs : pointer to stream configuration array
7141 * @scalar_format : scalar format
7142 * @dim : configuration dimension
7143 * @config_type : input or output configuration type
7144 *
7145 * RETURN : NONE
7146 *==========================================================================*/
addStreamConfig(Vector<int32_t> & available_stream_configs,int32_t scalar_format,const cam_dimension_t & dim,int32_t config_type)7147 void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
7148 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
7149 {
7150 available_stream_configs.add(scalar_format);
7151 available_stream_configs.add(dim.width);
7152 available_stream_configs.add(dim.height);
7153 available_stream_configs.add(config_type);
7154 }
7155
7156 /*===========================================================================
7157 * FUNCTION : suppportBurstCapture
7158 *
7159 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
7160 *
7161 * PARAMETERS :
7162 * @cameraId : camera Id
7163 *
7164 * RETURN : true if camera supports BURST_CAPTURE
7165 * false otherwise
7166 *==========================================================================*/
supportBurstCapture(uint32_t cameraId)7167 bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
7168 {
7169 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
7170 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
7171 const int32_t highResWidth = 3264;
7172 const int32_t highResHeight = 2448;
7173
7174 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
7175 // Maximum resolution images cannot be captured at >= 10fps
7176 // -> not supporting BURST_CAPTURE
7177 return false;
7178 }
7179
7180 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
7181 // Maximum resolution images can be captured at >= 20fps
7182 // --> supporting BURST_CAPTURE
7183 return true;
7184 }
7185
7186 // Find the smallest highRes resolution, or largest resolution if there is none
7187 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
7188 MAX_SIZES_CNT);
7189 size_t highRes = 0;
7190 while ((highRes + 1 < totalCnt) &&
7191 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
7192 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
7193 highResWidth * highResHeight)) {
7194 highRes++;
7195 }
7196 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
7197 return true;
7198 } else {
7199 return false;
7200 }
7201 }
7202
7203 /*===========================================================================
7204 * FUNCTION : initStaticMetadata
7205 *
7206 * DESCRIPTION: initialize the static metadata
7207 *
7208 * PARAMETERS :
7209 * @cameraId : camera Id
7210 *
7211 * RETURN : int32_t type of status
7212 * 0 -- success
7213 * non-zero failure code
7214 *==========================================================================*/
initStaticMetadata(uint32_t cameraId)7215 int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
7216 {
7217 int rc = 0;
7218 CameraMetadata staticInfo;
7219 size_t count = 0;
7220 bool limitedDevice = false;
7221 char prop[PROPERTY_VALUE_MAX];
7222 bool supportBurst = false;
7223
7224 supportBurst = supportBurstCapture(cameraId);
7225
7226 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
7227 * guaranteed or if min fps of max resolution is less than 20 fps, its
7228 * advertised as limited device*/
7229 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
7230 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
7231 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
7232 !supportBurst;
7233
7234 uint8_t supportedHwLvl = limitedDevice ?
7235 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
7236 // LEVEL_3 - This device will support level 3.
7237 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
7238
7239 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7240 &supportedHwLvl, 1);
7241
7242 bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
7243 /*HAL 3 only*/
7244 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7245 &gCamCapability[cameraId]->min_focus_distance, 1);
7246
7247 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
7248 &gCamCapability[cameraId]->hyper_focal_distance, 1);
7249
7250 /*should be using focal lengths but sensor doesn't provide that info now*/
7251 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7252 &gCamCapability[cameraId]->focal_length,
7253 1);
7254
7255 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7256 gCamCapability[cameraId]->apertures,
7257 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
7258
7259 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7260 gCamCapability[cameraId]->filter_densities,
7261 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
7262
7263
7264 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7265 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
7266 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
7267
7268 int32_t lens_shading_map_size[] = {
7269 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
7270 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
7271 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
7272 lens_shading_map_size,
7273 sizeof(lens_shading_map_size)/sizeof(int32_t));
7274
7275 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
7276 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
7277
7278 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
7279 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
7280
7281 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7282 &gCamCapability[cameraId]->max_frame_duration, 1);
7283
7284 camera_metadata_rational baseGainFactor = {
7285 gCamCapability[cameraId]->base_gain_factor.numerator,
7286 gCamCapability[cameraId]->base_gain_factor.denominator};
7287 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
7288 &baseGainFactor, 1);
7289
7290 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7291 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
7292
7293 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
7294 gCamCapability[cameraId]->pixel_array_size.height};
7295 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7296 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
7297
7298 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
7299 gCamCapability[cameraId]->active_array_size.top,
7300 gCamCapability[cameraId]->active_array_size.width,
7301 gCamCapability[cameraId]->active_array_size.height};
7302 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7303 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
7304
7305 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
7306 &gCamCapability[cameraId]->white_level, 1);
7307
7308 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
7309 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
7310 gCamCapability[cameraId]->color_arrangement);
7311 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
7312 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
7313
7314 bool hasBlackRegions = false;
7315 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
7316 LOGW("black_region_count: %d is bounded to %d",
7317 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
7318 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
7319 }
7320 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
7321 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
7322 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
7323 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
7324 }
7325 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
7326 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
7327 hasBlackRegions = true;
7328 }
7329
7330 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
7331 &gCamCapability[cameraId]->flash_charge_duration, 1);
7332
7333 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
7334 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
7335
7336 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
7337 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
7338 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
7339 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7340 ×tampSource, 1);
7341
7342 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7343 &gCamCapability[cameraId]->histogram_size, 1);
7344
7345 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7346 &gCamCapability[cameraId]->max_histogram_count, 1);
7347
7348 int32_t sharpness_map_size[] = {
7349 gCamCapability[cameraId]->sharpness_map_size.width,
7350 gCamCapability[cameraId]->sharpness_map_size.height};
7351
7352 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
7353 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
7354
7355 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7356 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
7357
7358 int32_t scalar_formats[] = {
7359 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
7360 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
7361 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
7362 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
7363 HAL_PIXEL_FORMAT_RAW10,
7364 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
7365 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
7366 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
7367 scalar_formats,
7368 scalar_formats_count);
7369
7370 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
7371 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
7372 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
7373 count, MAX_SIZES_CNT, available_processed_sizes);
7374 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
7375 available_processed_sizes, count * 2);
7376
7377 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
7378 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
7379 makeTable(gCamCapability[cameraId]->raw_dim,
7380 count, MAX_SIZES_CNT, available_raw_sizes);
7381 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
7382 available_raw_sizes, count * 2);
7383
7384 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
7385 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
7386 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
7387 count, MAX_SIZES_CNT, available_fps_ranges);
7388 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7389 available_fps_ranges, count * 2);
7390
7391 camera_metadata_rational exposureCompensationStep = {
7392 gCamCapability[cameraId]->exp_compensation_step.numerator,
7393 gCamCapability[cameraId]->exp_compensation_step.denominator};
7394 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
7395 &exposureCompensationStep, 1);
7396
7397 Vector<uint8_t> availableVstabModes;
7398 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
7399 char eis_prop[PROPERTY_VALUE_MAX];
7400 memset(eis_prop, 0, sizeof(eis_prop));
7401 property_get("persist.camera.eis.enable", eis_prop, "0");
7402 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
7403 if (facingBack && eis_prop_set) {
7404 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
7405 }
7406 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7407 availableVstabModes.array(), availableVstabModes.size());
7408
7409 /*HAL 1 and HAL 3 common*/
7410 float maxZoom = 4;
7411 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7412 &maxZoom, 1);
7413
7414 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
7415 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
7416
7417 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
7418 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
7419 max3aRegions[2] = 0; /* AF not supported */
7420 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
7421 max3aRegions, 3);
7422
7423 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
7424 memset(prop, 0, sizeof(prop));
7425 property_get("persist.camera.facedetect", prop, "1");
7426 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
7427 LOGD("Support face detection mode: %d",
7428 supportedFaceDetectMode);
7429
7430 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
7431 Vector<uint8_t> availableFaceDetectModes;
7432 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
7433 if (supportedFaceDetectMode == 1) {
7434 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
7435 } else if (supportedFaceDetectMode == 2) {
7436 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
7437 } else if (supportedFaceDetectMode == 3) {
7438 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
7439 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
7440 } else {
7441 maxFaces = 0;
7442 }
7443 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7444 availableFaceDetectModes.array(),
7445 availableFaceDetectModes.size());
7446 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
7447 (int32_t *)&maxFaces, 1);
7448
7449 int32_t exposureCompensationRange[] = {
7450 gCamCapability[cameraId]->exposure_compensation_min,
7451 gCamCapability[cameraId]->exposure_compensation_max};
7452 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
7453 exposureCompensationRange,
7454 sizeof(exposureCompensationRange)/sizeof(int32_t));
7455
7456 uint8_t lensFacing = (facingBack) ?
7457 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
7458 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
7459
7460 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7461 available_thumbnail_sizes,
7462 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
7463
    /*all sizes will be clubbed into this tag*/
    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    /*android.scaler.availableStreamConfigurations*/
    // available_stream_configs: flattened (format, width, height, direction)
    // tuples published under ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.
    Vector<int32_t> available_stream_configs;
    // stream_configs: the same entries kept as config_entry tuples so the
    // recommended-configuration pass below can test membership.
    std::vector<config_entry> stream_configs;
    // suggested_configs: config_entry -> bitmask of recommended-usecase bits.
    // operator[] default-constructs the mapped int32_t to 0, so |= on a new
    // key is well-defined.
    std::unordered_map<config_entry, int32_t, ConfigEntryHash> suggested_configs;
    // Processed (non-RAW, non-BLOB) formats eligible for the preview/record
    // recommended usecases.
    int32_t suggested_proc_formats[] = {
            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
    size_t suggested_formats_count = sizeof(suggested_proc_formats) /
            sizeof(suggested_proc_formats[0]);
    // Active pixel array dimensions (not referenced in this visible section;
    // presumably consumed further below — confirm before removing).
    cam_dimension_t active_array_dim;
    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
    // Usecase bitmasks for the recommended stream configuration tag.
    int32_t raw_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_RAW;
    int32_t zsl_snapshot_usecase =
            (1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_SNAPSHOT) |
            (1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_ZSL);
    int32_t zsl_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_ZSL;
    /* Add input/output stream configurations for each scalar formats*/
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        // RAW formats: one output configuration per supported raw dimension.
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->raw_dim[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                config_entry entry(gCamCapability[cameraId]->raw_dim[i].width,
                        gCamCapability[cameraId]->raw_dim[i].height, scalar_formats[j],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                stream_configs.push_back(entry);
                // Only RAW10 / opaque RAW are recommended for the RAW usecase;
                // RAW16 is advertised but not recommended.
                if ((scalar_formats[j] == HAL_PIXEL_FORMAT_RAW10) ||
                        (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE)) {
                    suggested_configs[entry] |= raw_usecase;
                }
            }
            break;
        // BLOB (JPEG): one output configuration per picture size; each is
        // recommended for snapshot + ZSL.
        case HAL_PIXEL_FORMAT_BLOB:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                // NOTE(review): the config_entry below is constructed twice
                // with identical arguments (once inline for the push_back,
                // once as 'entry'); the two could be collapsed into one.
                // Harmless, just redundant.
                stream_configs.push_back(config_entry(
                        gCamCapability[cameraId]->picture_sizes_tbl[i].width,
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height,
                        scalar_formats[j],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT));
                config_entry entry(gCamCapability[cameraId]->picture_sizes_tbl[i].width,
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height, scalar_formats[j],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                suggested_configs[entry] |= zsl_snapshot_usecase;
            }
            break;
        // Processed YUV / implementation-defined (and any remaining format):
        // output configuration per picture size, recommended for
        // snapshot + ZSL; also track the largest size for the input stream.
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            cam_dimension_t largest_picture_size;
            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                config_entry entry(gCamCapability[cameraId]->picture_sizes_tbl[i].width,
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height,
                        scalar_formats[j],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                stream_configs.push_back(entry);
                suggested_configs[entry] |= zsl_snapshot_usecase;
                /* Book keep largest */
                // Both dimensions must be >= the current maximum; assumes the
                // table contains a size that dominates in both axes.
                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
                        >= largest_picture_size.width &&
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
                        >= largest_picture_size.height)
                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
            }
            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
                // Input (reprocess) stream at the largest picture size only,
                // recommended for the ZSL usecase.
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        largest_picture_size,
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
                config_entry entry(largest_picture_size.width, largest_picture_size.height,
                        scalar_formats[j],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
                suggested_configs[entry] |= zsl_usecase;
            }
            break;
        }
    }
7560
    // Publish the full set of stream configurations built above.
    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
            available_stream_configs.array(), available_stream_configs.size());

    // Tag preview-capable sizes: a (size, format) pair gets the PREVIEW
    // usecase bit only if it already exists in the advertised stream
    // configurations (membership tested via linear std::find).
    int32_t preview_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PREVIEW;
    for (size_t i = 0; i < gCamCapability[cameraId]->preview_sizes_tbl_cnt; i++) {
        for (size_t j = 0; j < suggested_formats_count; j++) {
            config_entry entry(gCamCapability[cameraId]->preview_sizes_tbl[i].width,
                    gCamCapability[cameraId]->preview_sizes_tbl[i].height,
                    suggested_proc_formats[j],
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            if (std::find(stream_configs.begin(), stream_configs.end(), entry) !=
                    stream_configs.end()) {
                suggested_configs[entry] |= preview_usecase;
            }
        }
    }

    // Same scheme for video-record-capable sizes.
    int32_t record_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_RECORD;
    for (size_t i = 0; i < gCamCapability[cameraId]->video_sizes_tbl_cnt; i++) {
        for (size_t j = 0; j < suggested_formats_count; j++) {
            config_entry entry(gCamCapability[cameraId]->video_sizes_tbl[i].width,
                    gCamCapability[cameraId]->video_sizes_tbl[i].height,
                    suggested_proc_formats[j],
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            if (std::find(stream_configs.begin(), stream_configs.end(), entry) !=
                    stream_configs.end()) {
                suggested_configs[entry] |= record_usecase;
            }
        }
    }

    // Live-snapshot sizes (BLOB only) get the VIDEO_SNAPSHOT usecase bit.
    int32_t video_snapshot_usecase =
            1 << ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VIDEO_SNAPSHOT;
    for (size_t i = 0; i < gCamCapability[cameraId]->livesnapshot_sizes_tbl_cnt; i++) {
        config_entry entry(gCamCapability[cameraId]->livesnapshot_sizes_tbl[i].width,
                gCamCapability[cameraId]->livesnapshot_sizes_tbl[i].height,
                HAL_PIXEL_FORMAT_BLOB,
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
        if (std::find(stream_configs.begin(), stream_configs.end(), entry) !=
                stream_configs.end()) {
            suggested_configs[entry] |= video_snapshot_usecase;
        }
    }

    // Flatten the map into 5-tuples: (width, height, format, direction,
    // usecase-bitmask). Iteration order of the unordered_map is unspecified,
    // which is acceptable for this tag.
    std::vector<int32_t> suggested_array;
    suggested_array.reserve(suggested_configs.size() * 5);
    for (const auto &it : suggested_configs) {
        suggested_array.push_back(std::get<0>(it.first));
        suggested_array.push_back(std::get<1>(it.first));
        suggested_array.push_back(std::get<2>(it.first));
        suggested_array.push_back(std::get<3>(it.first));
        suggested_array.push_back(it.second);
    }

    staticInfo.update(ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS,
            suggested_array.data(), suggested_array.size());
7619
    /* android.scaler.availableMinFrameDurations */
    // Entries are (format, width, height, min_duration_ns) 4-tuples; RAW
    // formats use the raw dimension/duration tables, everything else uses
    // the picture tables.
    Vector<int64_t> available_min_durations;
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
                available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
            }
            break;
        default:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
                available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
            }
            break;
        }
    }
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
            available_min_durations.array(), available_min_durations.size());
7648
    // Build ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS from
    // the HFR capability table. Each table row maps an HFR mode enum to a
    // concrete fps value.
    Vector<int32_t> available_hfr_configs;
    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
        int32_t fps = 0;
        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
        case CAM_HFR_MODE_60FPS:
            fps = 60;
            break;
        case CAM_HFR_MODE_90FPS:
            fps = 90;
            break;
        case CAM_HFR_MODE_120FPS:
            fps = 120;
            break;
        case CAM_HFR_MODE_150FPS:
            fps = 150;
            break;
        case CAM_HFR_MODE_180FPS:
            fps = 180;
            break;
        case CAM_HFR_MODE_210FPS:
            fps = 210;
            break;
        case CAM_HFR_MODE_240FPS:
            fps = 240;
            break;
        case CAM_HFR_MODE_480FPS:
            fps = 480;
            break;
        case CAM_HFR_MODE_OFF:
        case CAM_HFR_MODE_MAX:
        default:
            // Unknown/off modes leave fps at 0 and are filtered out below.
            break;
        }

        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
            /* For each HFR frame rate, need to advertise one variable fps range
             * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
             * and [120, 120]. While camcorder preview alone is running [30, 120] is
             * set by the app. When video recording is started, [120, 120] is
             * set. This way sensor configuration does not change when recording
             * is started */

            /* (width, height, fps_min, fps_max, batch_size_max) */
            for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
                    j < MAX_SIZES_CNT; j++) {
                // Variable range: [PREVIEW_FPS_FOR_HFR, fps].
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
                available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
                available_hfr_configs.add(fps);
                // batch_size_max: frames batched per request at this rate.
                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);

                /* (width, height, fps_min, fps_max, batch_size_max) */
                // Fixed range: [fps, fps].
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
            }
        }
    }
    //Advertise HFR capability only if the property is set
    // Property defaults to "1", i.e. HFR is advertised unless explicitly
    // disabled via persist.camera.hal3hfr.enable=0.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.hal3hfr.enable", prop, "1");
    uint8_t hfrEnable = (uint8_t)atoi(prop);

    // Only publish the tag when enabled AND at least one config was added
    // (array() is non-null only for a non-empty Vector).
    if(hfrEnable && available_hfr_configs.array()) {
        staticInfo.update(
                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
                available_hfr_configs.array(), available_hfr_configs.size());
    }
7724
    // Largest possible JPEG blob for this sensor (helper defined elsewhere).
    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
            &max_jpeg_size, 1);

    // Translate HAL effect enums to framework enums, skipping any with no
    // framework counterpart ('size' counts only successful translations).
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    size_t size = 0;
    count = CAM_EFFECT_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                gCamCapability[cameraId]->supported_effects[i]);
        if (NAME_NOT_FOUND != val) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
            avail_effects,
            size);

    // Translate scene modes the same way; supported_indexes remembers the
    // original HAL table index of each translated mode so makeOverridesList
    // can fetch the matching override entries.
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    size_t supported_scene_modes_cnt = 0;
    count = CAM_SCENE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
                CAM_SCENE_MODE_OFF) {
            int val = lookupFwkName(SCENE_MODES_MAP,
                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
                    gCamCapability[cameraId]->supported_scene_modes[i]);
            if (NAME_NOT_FOUND != val) {
                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
                supported_scene_modes_cnt++;
            }
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
            avail_scene_modes,
            supported_scene_modes_cnt);

    // Per-scene-mode (AE, AWB, AF) override triples.
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
            supported_scene_modes_cnt,
            CAM_SCENE_MODE_MAX,
            scene_mode_overrides,
            supported_indexes,
            cameraId);

    // If no scene mode survived translation, advertise DISABLED alone.
    // NOTE(review): in that case makeOverridesList above was invoked with a
    // count of 0, so scene_mode_overrides[0..2] published below look
    // uninitialized — confirm makeOverridesList's behavior for count == 0.
    if (supported_scene_modes_cnt == 0) {
        supported_scene_modes_cnt = 1;
        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
    }

    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
            scene_mode_overrides, supported_scene_modes_cnt * 3);

    // Fixed list of top-level 3A control modes.
    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
            ANDROID_CONTROL_MODE_AUTO,
            ANDROID_CONTROL_MODE_USE_SCENE_MODE};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
            available_control_modes,
            3);
7789
    // Supported AE antibanding modes, translated HAL -> framework.
    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    count = CAM_ANTIBANDING_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                gCamCapability[cameraId]->supported_antibandings[i]);
        if (NAME_NOT_FOUND != val) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
            avail_antibanding_modes,
            size);

    // Chromatic aberration correction modes. The list below is fixed; only
    // the advertised count varies with device capability.
    uint8_t avail_abberation_modes[] = {
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
    if (0 == count) {
        // If no aberration correction modes are available for a device, this advertise OFF mode
        size = 1;
    } else {
        // If count is not zero then atleast one among the FAST or HIGH quality is supported
        // So, advertize all 3 modes if atleast any one mode is supported as per the
        // new M requirement
        size = 3;
    }
    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
            avail_abberation_modes,
            size);

    // Supported AF modes, translated HAL -> framework.
    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    count = CAM_FOCUS_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (NAME_NOT_FOUND != val) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
            avail_af_modes,
            size);

    // Supported AWB modes, translated HAL -> framework.
    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    count = CAM_WB_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                gCamCapability[cameraId]->supported_white_balances[i]);
        if (NAME_NOT_FOUND != val) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
            avail_awb_modes,
            size);
7858
    // Flash firing power levels are passed through without translation.
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    count = CAM_FLASH_FIRING_LEVEL_MAX;
    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
            count);
    for (size_t i = 0; i < count; i++) {
        available_flash_levels[i] =
                gCamCapability[cameraId]->supported_firing_levels[i];
    }
    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels, count);

    // Whether a flash unit is physically present.
    uint8_t flashAvailable;
    if (gCamCapability[cameraId]->flash_available)
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
    else
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    // AE modes: HAL-reported modes (with the external-flash mode remapped to
    // the experimental framework value), plus the two flash-dependent modes
    // when a flash unit exists.
    Vector<uint8_t> avail_ae_modes;
    count = CAM_AE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
        if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
            aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
        }
        avail_ae_modes.add(aeMode);
    }
    if (flashAvailable) {
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
            avail_ae_modes.array(),
            avail_ae_modes.size());
7895
    // Sensor ISO sensitivity range [min, max].
    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
            sensitivity_range,
            sizeof(sensitivity_range) / sizeof(int32_t));

    // Highest sensitivity achievable through analog gain alone.
    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
            &gCamCapability[cameraId]->max_analog_sensitivity,
            1);

    // Sensor mount angle, published as the clockwise orientation in degrees.
    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
            &sensor_orientation,
            1);

    // Max simultaneous output streams, ordered per the metadata contract:
    // (stalling, processed, raw).
    int32_t max_output_streams[] = {
            MAX_STALLING_STREAMS,
            MAX_PROCESSED_STREAMS,
            MAX_RAW_STREAMS};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            max_output_streams,
            sizeof(max_output_streams)/sizeof(max_output_streams[0]));

    // No app-controllable LEDs: publish the tag with an empty (count 0) list.
    uint8_t avail_leds = 0;
    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
            &avail_leds, 0);

    // Focus distance calibration quality; only published when the HAL value
    // has a framework equivalent.
    uint8_t focus_dist_calibrated;
    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
            gCamCapability[cameraId]->focus_dist_calibrated);
    if (NAME_NOT_FOUND != val) {
        focus_dist_calibrated = (uint8_t)val;
        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                &focus_dist_calibrated, 1);
    }

    // Supported sensor test-pattern modes, translated HAL -> framework.
    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
    size = 0;
    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
            MAX_TEST_PATTERN_CNT);
    for (size_t i = 0; i < count; i++) {
        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
        if (NAME_NOT_FOUND != testpatternMode) {
            avail_testpattern_modes[size] = testpatternMode;
            size++;
        }
    }
    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
            avail_testpattern_modes,
            size);

    // Worst-case number of requests in flight through the pipeline.
    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
            &max_pipeline_depth,
            1);

    // Number of partial result callbacks per capture.
    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
            &partial_result_count,
            1);

    // Max extra frames of stall a reprocess capture may add.
    int32_t max_stall_duration = MAX_REPROCESS_STALL;
    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
7961
    // Assemble the device capability list. Manual sensor / post-processing
    // are advertised unconditionally here; BURST depends on the supportBurst
    // flag computed earlier, HFR on the property + table check above, and RAW
    // on the sensor not being a YUV sensor.
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    if (supportBurst) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    }
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    if (hfrEnable && available_hfr_configs.array()) {
        available_capabilities.add(
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
    }

    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            available_capabilities.array(),
            available_capabilities.size());

    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
    //Assumption is that all bayer cameras support MANUAL_SENSOR.
    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
    //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);

    // A single reprocess input stream is supported.
    int32_t max_input_streams = 1;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
            &max_input_streams,
            1);

    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_YCbCr_420_888};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
            io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    // The recommended IO map mirrors the full map exactly.
    staticInfo.update(ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP,
            io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    // Settings take effect with per-frame-control latency.
    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
            &max_latency,
            1);
8020
    // Post-RAW (ISP digital gain) sensitivity boost range.
    int32_t isp_sensitivity_range[2];
    isp_sensitivity_range[0] =
            gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
    isp_sensitivity_range[1] =
            gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
            isp_sensitivity_range,
            sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));

    // Fixed mode lists advertised for the remaining processing blocks.
    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
            ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
            available_hot_pixel_modes,
            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));

    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
            ANDROID_SHADING_MODE_FAST,
            ANDROID_SHADING_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
            available_shading_modes,
            3);

    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
            ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
            available_lens_shading_map_modes,
            2);

    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
            ANDROID_EDGE_MODE_FAST,
            ANDROID_EDGE_MODE_HIGH_QUALITY,
            ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            available_edge_modes,
            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));

    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
            ANDROID_NOISE_REDUCTION_MODE_FAST,
            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
            ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
            ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            available_noise_red_modes,
            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));

    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
            ANDROID_TONEMAP_MODE_FAST,
            ANDROID_TONEMAP_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
            available_tonemap_modes,
            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));

    // Hot-pixel map output is not supported: only OFF is advertised.
    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            available_hot_pixel_map_modes,
            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));

    // Reference illuminants for the two color calibration sets; published
    // only when the HAL value maps to a framework enum.
    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant1);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
    }

    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant2);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
    }

    // Color calibration matrices. The capability fields are reinterpreted as
    // camera_metadata_rational_t arrays via void* — assumes the HAL struct
    // layout matches the rational layout (numerator, denominator pairs).
    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix1,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix2,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform1,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform2,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform1,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform2,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    // Keys that may only be set at session configuration time.
    int32_t session_keys[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, QCAMERA3_USE_AV_TIMER,
            ANDROID_CONTROL_AE_TARGET_FPS_RANGE};

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_SESSION_KEYS, session_keys,
            sizeof(session_keys) / sizeof(session_keys[0]));
8121
    // Baseline set of request keys every device variant supports; AF regions
    // are appended below only for cameras with more than one focus mode
    // (i.e. cameras with an actuator).
    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
            ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
            ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
            ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
            ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
            ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
            ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
            ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
            ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
            ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
            ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
            ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
            ANDROID_JPEG_GPS_COORDINATES,
            ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
            ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
            ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
            ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
            ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
            ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
            ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
            ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
            ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
            ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
            ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
            ANDROID_STATISTICS_FACE_DETECT_MODE,
            ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
            ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
            ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
            ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
            QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
            QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
            QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TEMPORAL_DENOISE_ENABLE,
            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, QCAMERA3_USE_AV_TIMER,
            QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
            QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
            /* DevCamDebug metadata request_keys_basic */
            DEVCAMDEBUG_META_ENABLE,
            /* DevCamDebug metadata end */
            };

    size_t request_keys_cnt =
            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
    Vector<int32_t> available_request_keys;
    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
    // More than one focus mode implies a movable lens, so AF regions become
    // meaningful.
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
            available_request_keys.array(), available_request_keys.size());
8173
    // Baseline set of result (capture callback) keys, including vendor
    // (QCAMERA3_*) tags and the DevCamDebug diagnostic tags. Consumed by the
    // ANDROID_REQUEST_AVAILABLE_RESULT_KEYS update that follows this array
    // (the consuming statement begins past the end of this section).
    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
            ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
            ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
            ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AF_SCENE_CHANGE, ANDROID_CONTROL_AWB_MODE,
            ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
            ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
            ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
            ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
            ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
            ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
            ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
            ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
            ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
            ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
            ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
            ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
            ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
            ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
            ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
            ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
            ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
            ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
            ANDROID_STATISTICS_FACE_SCORES,
            NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
            NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
            QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
            QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
            QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TUNING_META_DATA_BLOB,
            QCAMERA3_TEMPORAL_DENOISE_ENABLE, QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE,
            QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN,
            QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
            QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
            // DevCamDebug metadata result_keys_basic
            DEVCAMDEBUG_META_ENABLE,
            // DevCamDebug metadata result_keys AF
            DEVCAMDEBUG_AF_LENS_POSITION,
            DEVCAMDEBUG_AF_TOF_CONFIDENCE,
            DEVCAMDEBUG_AF_TOF_DISTANCE,
            DEVCAMDEBUG_AF_LUMA,
            DEVCAMDEBUG_AF_HAF_STATE,
            DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
            DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
            DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
            DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
            DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
            DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
            DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
            DEVCAMDEBUG_AF_MONITOR_REFOCUS,
            DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
            DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
            DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
            DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
            DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
            DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
            DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
            DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
            DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
            DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
            DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
            DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
            DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
            DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
            // DevCamDebug metadata result_keys AEC
            DEVCAMDEBUG_AEC_TARGET_LUMA,
            DEVCAMDEBUG_AEC_COMP_LUMA,
            DEVCAMDEBUG_AEC_AVG_LUMA,
            DEVCAMDEBUG_AEC_CUR_LUMA,
            DEVCAMDEBUG_AEC_LINECOUNT,
            DEVCAMDEBUG_AEC_REAL_GAIN,
            DEVCAMDEBUG_AEC_EXP_INDEX,
            DEVCAMDEBUG_AEC_LUX_IDX,
            // DevCamDebug metadata result_keys AWB
            DEVCAMDEBUG_AWB_R_GAIN,
            DEVCAMDEBUG_AWB_G_GAIN,
            DEVCAMDEBUG_AWB_B_GAIN,
            DEVCAMDEBUG_AWB_CCT,
            DEVCAMDEBUG_AWB_DECISION,
            /* DevCamDebug metadata end */
            };
8253 size_t result_keys_cnt =
8254 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
8255
8256 Vector<int32_t> available_result_keys;
8257 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
8258 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8259 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
8260 }
8261 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
8262 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
8263 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
8264 }
8265 if (supportedFaceDetectMode == 1) {
8266 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
8267 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
8268 } else if ((supportedFaceDetectMode == 2) ||
8269 (supportedFaceDetectMode == 3)) {
8270 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
8271 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
8272 }
8273 if (hasBlackRegions) {
8274 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
8275 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
8276 }
8277 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8278 available_result_keys.array(), available_result_keys.size());
8279
8280 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
8281 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8282 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
8283 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
8284 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8285 ANDROID_SCALER_CROPPING_TYPE,
8286 ANDROID_SYNC_MAX_LATENCY,
8287 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8288 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8289 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8290 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
8291 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
8292 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8293 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8294 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8295 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8296 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8297 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8298 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8299 ANDROID_LENS_FACING,
8300 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8301 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8302 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8303 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8304 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8305 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8306 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8307 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
8308 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
8309 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
8310 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
8311 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
8312 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8313 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8314 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8315 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8316 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
8317 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8318 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8319 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8320 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
8321 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
8322 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8323 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8324 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8325 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8326 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8327 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8328 ANDROID_TONEMAP_MAX_CURVE_POINTS,
8329 ANDROID_CONTROL_AVAILABLE_MODES,
8330 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8331 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8332 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8333 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8334 ANDROID_SHADING_AVAILABLE_MODES,
8335 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8336 ANDROID_SENSOR_OPAQUE_RAW_SIZE, QCAMERA3_OPAQUE_RAW_FORMAT,
8337 ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS,
8338 ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP
8339 };
8340
8341 Vector<int32_t> available_characteristics_keys;
8342 available_characteristics_keys.appendArray(characteristics_keys_basic,
8343 sizeof(characteristics_keys_basic)/sizeof(int32_t));
8344 if (hasBlackRegions) {
8345 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
8346 }
8347
8348 /*available stall durations depend on the hw + sw and will be different for different devices */
8349 /*have to add for raw after implementation*/
8350 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
8351 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
8352
8353 Vector<int64_t> available_stall_durations;
8354 for (uint32_t j = 0; j < stall_formats_count; j++) {
8355 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
8356 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8357 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8358 available_stall_durations.add(stall_formats[j]);
8359 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8360 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8361 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
8362 }
8363 } else {
8364 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8365 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8366 available_stall_durations.add(stall_formats[j]);
8367 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8368 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8369 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
8370 }
8371 }
8372 }
8373 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
8374 available_stall_durations.array(),
8375 available_stall_durations.size());
8376
8377 //QCAMERA3_OPAQUE_RAW
8378 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8379 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8380 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
8381 case LEGACY_RAW:
8382 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8383 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
8384 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8385 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8386 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8387 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
8388 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8389 break;
8390 case MIPI_RAW:
8391 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8392 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
8393 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8394 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
8395 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8396 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
8397 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
8398 break;
8399 default:
8400 LOGE("unknown opaque_raw_format %d",
8401 gCamCapability[cameraId]->opaque_raw_fmt);
8402 break;
8403 }
8404 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
8405
8406 Vector<int32_t> strides;
8407 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8408 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8409 cam_stream_buf_plane_info_t buf_planes;
8410 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
8411 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
8412 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8413 &gCamCapability[cameraId]->padding_info, &buf_planes);
8414 strides.add(buf_planes.plane_info.mp[0].stride);
8415 }
8416
8417 if (!strides.isEmpty()) {
8418 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
8419 strides.size());
8420 available_characteristics_keys.add(QCAMERA3_OPAQUE_RAW_STRIDES);
8421 }
8422 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
8423 available_characteristics_keys.array(),
8424 available_characteristics_keys.size());
8425
8426 Vector<int32_t> opaque_size;
8427 for (size_t j = 0; j < scalar_formats_count; j++) {
8428 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
8429 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8430 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8431 cam_stream_buf_plane_info_t buf_planes;
8432
8433 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8434 &gCamCapability[cameraId]->padding_info, &buf_planes);
8435
8436 if (rc == 0) {
8437 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
8438 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
8439 opaque_size.add(buf_planes.plane_info.frame_len);
8440 }else {
8441 LOGE("raw frame calculation failed!");
8442 }
8443 }
8444 }
8445 }
8446
8447 if ((opaque_size.size() > 0) &&
8448 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
8449 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
8450 else
8451 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
8452
8453 gStaticMetadata[cameraId] = staticInfo.release();
8454 return rc;
8455 }
8456
8457 /*===========================================================================
8458 * FUNCTION : makeTable
8459 *
8460 * DESCRIPTION: make a table of sizes
8461 *
8462 * PARAMETERS :
8463 *
8464 *
8465 *==========================================================================*/
makeTable(cam_dimension_t * dimTable,size_t size,size_t max_size,int32_t * sizeTable)8466 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
8467 size_t max_size, int32_t *sizeTable)
8468 {
8469 size_t j = 0;
8470 if (size > max_size) {
8471 size = max_size;
8472 }
8473 for (size_t i = 0; i < size; i++) {
8474 sizeTable[j] = dimTable[i].width;
8475 sizeTable[j+1] = dimTable[i].height;
8476 j+=2;
8477 }
8478 }
8479
8480 /*===========================================================================
8481 * FUNCTION : makeFPSTable
8482 *
8483 * DESCRIPTION: make a table of fps ranges
8484 *
8485 * PARAMETERS :
8486 *
8487 *==========================================================================*/
makeFPSTable(cam_fps_range_t * fpsTable,size_t size,size_t max_size,int32_t * fpsRangesTable)8488 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
8489 size_t max_size, int32_t *fpsRangesTable)
8490 {
8491 size_t j = 0;
8492 if (size > max_size) {
8493 size = max_size;
8494 }
8495 for (size_t i = 0; i < size; i++) {
8496 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
8497 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
8498 j+=2;
8499 }
8500 }
8501
8502 /*===========================================================================
8503 * FUNCTION : makeOverridesList
8504 *
8505 * DESCRIPTION: make a list of scene mode overrides
8506 *
8507 * PARAMETERS :
8508 *
8509 *
8510 *==========================================================================*/
makeOverridesList(cam_scene_mode_overrides_t * overridesTable,size_t size,size_t max_size,uint8_t * overridesList,uint8_t * supported_indexes,uint32_t camera_id)8511 void QCamera3HardwareInterface::makeOverridesList(
8512 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
8513 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
8514 {
8515 /*daemon will give a list of overrides for all scene modes.
8516 However we should send the fwk only the overrides for the scene modes
8517 supported by the framework*/
8518 size_t j = 0;
8519 if (size > max_size) {
8520 size = max_size;
8521 }
8522 size_t focus_count = CAM_FOCUS_MODE_MAX;
8523 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
8524 focus_count);
8525 for (size_t i = 0; i < size; i++) {
8526 bool supt = false;
8527 size_t index = supported_indexes[i];
8528 overridesList[j] = gCamCapability[camera_id]->flash_available ?
8529 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
8530 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8531 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8532 overridesTable[index].awb_mode);
8533 if (NAME_NOT_FOUND != val) {
8534 overridesList[j+1] = (uint8_t)val;
8535 }
8536 uint8_t focus_override = overridesTable[index].af_mode;
8537 for (size_t k = 0; k < focus_count; k++) {
8538 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
8539 supt = true;
8540 break;
8541 }
8542 }
8543 if (supt) {
8544 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8545 focus_override);
8546 if (NAME_NOT_FOUND != val) {
8547 overridesList[j+2] = (uint8_t)val;
8548 }
8549 } else {
8550 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
8551 }
8552 j+=3;
8553 }
8554 }
8555
8556 /*===========================================================================
8557 * FUNCTION : filterJpegSizes
8558 *
8559 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
8560 * could be downscaled to
8561 *
8562 * PARAMETERS :
8563 *
8564 * RETURN : length of jpegSizes array
8565 *==========================================================================*/
8566
filterJpegSizes(int32_t * jpegSizes,int32_t * processedSizes,size_t processedSizesCnt,size_t maxCount,cam_rect_t active_array_size,uint8_t downscale_factor)8567 size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
8568 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
8569 uint8_t downscale_factor)
8570 {
8571 if (0 == downscale_factor) {
8572 downscale_factor = 1;
8573 }
8574
8575 int32_t min_width = active_array_size.width / downscale_factor;
8576 int32_t min_height = active_array_size.height / downscale_factor;
8577 size_t jpegSizesCnt = 0;
8578 if (processedSizesCnt > maxCount) {
8579 processedSizesCnt = maxCount;
8580 }
8581 for (size_t i = 0; i < processedSizesCnt; i+=2) {
8582 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
8583 jpegSizes[jpegSizesCnt] = processedSizes[i];
8584 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
8585 jpegSizesCnt += 2;
8586 }
8587 }
8588 return jpegSizesCnt;
8589 }
8590
8591 /*===========================================================================
8592 * FUNCTION : computeNoiseModelEntryS
8593 *
8594 * DESCRIPTION: function to map a given sensitivity to the S noise
8595 * model parameters in the DNG noise model.
8596 *
8597 * PARAMETERS : sens : the sensor sensitivity
8598 *
 * RETURN     : S (sensor amplification) noise
8600 *
8601 *==========================================================================*/
computeNoiseModelEntryS(int32_t sens)8602 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
8603 double s = gCamCapability[mCameraId]->gradient_S * sens +
8604 gCamCapability[mCameraId]->offset_S;
8605 return ((s < 0.0) ? 0.0 : s);
8606 }
8607
8608 /*===========================================================================
8609 * FUNCTION : computeNoiseModelEntryO
8610 *
8611 * DESCRIPTION: function to map a given sensitivity to the O noise
8612 * model parameters in the DNG noise model.
8613 *
8614 * PARAMETERS : sens : the sensor sensitivity
8615 *
 * RETURN     : O (sensor readout) noise
8617 *
8618 *==========================================================================*/
computeNoiseModelEntryO(int32_t sens)8619 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
8620 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
8621 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
8622 1.0 : (1.0 * sens / max_analog_sens);
8623 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
8624 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
8625 return ((o < 0.0) ? 0.0 : o);
8626 }
8627
8628 /*===========================================================================
8629 * FUNCTION : getSensorSensitivity
8630 *
8631 * DESCRIPTION: convert iso_mode to an integer value
8632 *
8633 * PARAMETERS : iso_mode : the iso_mode supported by sensor
8634 *
 * RETURN     : sensitivity supported by sensor
8636 *
8637 *==========================================================================*/
getSensorSensitivity(int32_t iso_mode)8638 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
8639 {
8640 int32_t sensitivity;
8641
8642 switch (iso_mode) {
8643 case CAM_ISO_MODE_100:
8644 sensitivity = 100;
8645 break;
8646 case CAM_ISO_MODE_200:
8647 sensitivity = 200;
8648 break;
8649 case CAM_ISO_MODE_400:
8650 sensitivity = 400;
8651 break;
8652 case CAM_ISO_MODE_800:
8653 sensitivity = 800;
8654 break;
8655 case CAM_ISO_MODE_1600:
8656 sensitivity = 1600;
8657 break;
8658 default:
8659 sensitivity = -1;
8660 break;
8661 }
8662 return sensitivity;
8663 }
8664
8665 /*===========================================================================
8666 * FUNCTION : isStreamCombinationSupported
8667 *
8668 * DESCRIPTION: query camera support for specific stream combination
8669 *
8670 * PARAMETERS :
8671 * @cameraId : camera Id
8672 * @comb : stream combination
8673 *
8674 * RETURN : int type of status
8675 * NO_ERROR -- in case combination is supported
8676 * none-zero failure code
8677 *==========================================================================*/
isStreamCombinationSupported(uint32_t cameraId,const camera_stream_combination_t * comb)8678 int QCamera3HardwareInterface::isStreamCombinationSupported(uint32_t cameraId,
8679 const camera_stream_combination_t *comb)
8680 {
8681 int rc = BAD_VALUE;
8682 pthread_mutex_lock(&gCamLock);
8683
8684 if (NULL == gCamCapability[cameraId]) {
8685 rc = initCapabilities(cameraId);
8686 if (rc < 0) {
8687 pthread_mutex_unlock(&gCamLock);
8688 return rc;
8689 }
8690 }
8691
8692 camera3_stream_configuration_t streamList = {comb->num_streams, /*streams*/ nullptr,
8693 comb->operation_mode, /*session_parameters*/ nullptr};
8694 streamList.streams = new camera3_stream_t * [comb->num_streams];
8695 camera3_stream_t *streamBuffer = new camera3_stream_t[comb->num_streams];
8696 for (size_t i = 0; i < comb->num_streams; i++) {
8697 streamBuffer[i] = {comb->streams[i].stream_type, comb->streams[i].width,
8698 comb->streams[i].height, comb->streams[i].format, comb->streams[i].usage,
8699 /*max_buffers*/ 0, /*priv*/ nullptr, comb->streams[i].data_space,
8700 comb->streams[i].rotation, comb->streams[i].physical_camera_id, /*reserved*/ {nullptr}};
8701 streamList.streams[i] = &streamBuffer[i];
8702 }
8703
8704 StreamValidateStatus validateStatus;
8705 rc = validateStreamCombination(cameraId, &streamList, &validateStatus);
8706
8707 delete [] streamBuffer;
8708 delete [] streamList.streams;
8709 pthread_mutex_unlock(&gCamLock);
8710
8711 return rc;
8712 }
8713
8714 /*===========================================================================
8715 * FUNCTION : getCamInfo
8716 *
8717 * DESCRIPTION: query camera capabilities
8718 *
8719 * PARAMETERS :
8720 * @cameraId : camera Id
8721 * @info : camera info struct to be filled in with camera capabilities
8722 *
8723 * RETURN : int type of status
8724 * NO_ERROR -- success
8725 * none-zero failure code
8726 *==========================================================================*/
getCamInfo(uint32_t cameraId,struct camera_info * info)8727 int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
8728 struct camera_info *info)
8729 {
8730 ATRACE_CALL();
8731 int rc = 0;
8732
8733 pthread_mutex_lock(&gCamLock);
8734 if (NULL == gCamCapability[cameraId]) {
8735 rc = initCapabilities(cameraId);
8736 if (rc < 0) {
8737 pthread_mutex_unlock(&gCamLock);
8738 return rc;
8739 }
8740 }
8741
8742 if (NULL == gStaticMetadata[cameraId]) {
8743 rc = initStaticMetadata(cameraId);
8744 if (rc < 0) {
8745 pthread_mutex_unlock(&gCamLock);
8746 return rc;
8747 }
8748 }
8749
8750 switch(gCamCapability[cameraId]->position) {
8751 case CAM_POSITION_BACK:
8752 info->facing = CAMERA_FACING_BACK;
8753 break;
8754
8755 case CAM_POSITION_FRONT:
8756 info->facing = CAMERA_FACING_FRONT;
8757 break;
8758
8759 default:
8760 LOGE("Unknown position type for camera id:%d", cameraId);
8761 rc = -1;
8762 break;
8763 }
8764
8765
8766 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
8767 info->device_version = CAMERA_DEVICE_API_VERSION_3_5;
8768 info->static_camera_characteristics = gStaticMetadata[cameraId];
8769
8770 //For now assume both cameras can operate independently.
8771 info->conflicting_devices = NULL;
8772 info->conflicting_devices_length = 0;
8773
8774 //resource cost is 100 * MIN(1.0, m/M),
8775 //where m is throughput requirement with maximum stream configuration
8776 //and M is CPP maximum throughput.
8777 float max_fps = 0.0;
8778 for (uint32_t i = 0;
8779 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
8780 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
8781 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
8782 }
8783 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
8784 gCamCapability[cameraId]->active_array_size.width *
8785 gCamCapability[cameraId]->active_array_size.height * max_fps /
8786 gCamCapability[cameraId]->max_pixel_bandwidth;
8787 info->resource_cost = 100 * MIN(1.0, ratio);
8788 LOGI("camera %d resource cost is %d", cameraId,
8789 info->resource_cost);
8790
8791 pthread_mutex_unlock(&gCamLock);
8792 return rc;
8793 }
8794
8795 /*===========================================================================
8796 * FUNCTION : translateCapabilityToMetadata
8797 *
8798 * DESCRIPTION: translate the capability into camera_metadata_t
8799 *
8800 * PARAMETERS : type of the request
8801 *
8802 *
8803 * RETURN : success: camera_metadata_t*
8804 * failure: NULL
8805 *
8806 *==========================================================================*/
translateCapabilityToMetadata(int type)8807 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
8808 {
8809 if (mDefaultMetadata[type] != NULL) {
8810 return mDefaultMetadata[type];
8811 }
8812 //first time we are handling this request
8813 //fill up the metadata structure using the wrapper class
8814 CameraMetadata settings;
8815 //translate from cam_capability_t to camera_metadata_tag_t
8816 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
8817 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
8818 int32_t defaultRequestID = 0;
8819 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
8820
8821 /* OIS disable */
8822 char ois_prop[PROPERTY_VALUE_MAX];
8823 memset(ois_prop, 0, sizeof(ois_prop));
8824 property_get("persist.camera.ois.disable", ois_prop, "0");
8825 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
8826
8827 /* Force video to use OIS */
8828 char videoOisProp[PROPERTY_VALUE_MAX];
8829 memset(videoOisProp, 0, sizeof(videoOisProp));
8830 property_get("persist.camera.ois.video", videoOisProp, "1");
8831 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
8832
8833 // Hybrid AE enable/disable
8834 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
8835 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
8836 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
8837 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
8838
8839 uint8_t controlIntent = 0;
8840 uint8_t focusMode;
8841 uint8_t vsMode;
8842 uint8_t optStabMode;
8843 uint8_t cacMode;
8844 uint8_t edge_mode;
8845 uint8_t noise_red_mode;
8846 uint8_t shading_mode;
8847 uint8_t hot_pixel_mode;
8848 uint8_t tonemap_mode;
8849 bool highQualityModeEntryAvailable = FALSE;
8850 bool fastModeEntryAvailable = FALSE;
8851 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
8852 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8853 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8854
8855 switch (type) {
8856 case CAMERA3_TEMPLATE_PREVIEW:
8857 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
8858 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8859 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8860 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8861 edge_mode = ANDROID_EDGE_MODE_FAST;
8862 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8863 shading_mode = ANDROID_SHADING_MODE_FAST;
8864 hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
8865 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8866 break;
8867 case CAMERA3_TEMPLATE_STILL_CAPTURE:
8868 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
8869 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8870 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8871 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
8872 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
8873 shading_mode = ANDROID_SHADING_MODE_HIGH_QUALITY;
8874 hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
8875 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
8876 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8877 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
8878 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
8879 if (gCamCapability[mCameraId]->aberration_modes[i] ==
8880 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
8881 highQualityModeEntryAvailable = TRUE;
8882 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
8883 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
8884 fastModeEntryAvailable = TRUE;
8885 }
8886 }
8887 if (highQualityModeEntryAvailable) {
8888 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
8889 } else if (fastModeEntryAvailable) {
8890 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8891 }
8892 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
8893 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
8894 }
8895 break;
8896 case CAMERA3_TEMPLATE_VIDEO_RECORD:
8897 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
8898 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8899 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8900 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8901 edge_mode = ANDROID_EDGE_MODE_FAST;
8902 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8903 shading_mode = ANDROID_SHADING_MODE_FAST;
8904 hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
8905 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8906 if (forceVideoOis)
8907 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8908 break;
8909 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
8910 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
8911 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8912 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8913 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8914 edge_mode = ANDROID_EDGE_MODE_FAST;
8915 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8916 shading_mode = ANDROID_SHADING_MODE_FAST;
8917 hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
8918 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8919 if (forceVideoOis)
8920 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8921 break;
8922 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
8923 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
8924 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8925 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8926 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8927 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
8928 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
8929 shading_mode = ANDROID_SHADING_MODE_FAST;
8930 hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
8931 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8932 break;
8933 case CAMERA3_TEMPLATE_MANUAL:
8934 edge_mode = ANDROID_EDGE_MODE_FAST;
8935 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8936 shading_mode = ANDROID_SHADING_MODE_FAST;
8937 hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
8938 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8939 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8940 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
8941 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8942 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8943 break;
8944 default:
8945 edge_mode = ANDROID_EDGE_MODE_FAST;
8946 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8947 shading_mode = ANDROID_SHADING_MODE_FAST;
8948 hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
8949 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8950 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8951 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
8952 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8953 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8954 break;
8955 }
8956 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
8957 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
8958 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
8959 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
8960 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8961 }
8962 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
8963
8964 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8965 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
8966 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8967 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8968 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
8969 || ois_disable)
8970 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8971 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
8972 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
8973
8974 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8975 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
8976
8977 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
8978 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
8979
8980 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
8981 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
8982
8983 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
8984 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
8985
8986 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
8987 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
8988
8989 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
8990 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
8991
8992 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
8993 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
8994
8995 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
8996 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
8997
8998 /*flash*/
8999 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
9000 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
9001
9002 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
9003 settings.update(ANDROID_FLASH_FIRING_POWER,
9004 &flashFiringLevel, 1);
9005
9006 /* lens */
9007 float default_aperture = gCamCapability[mCameraId]->apertures[0];
9008 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
9009
9010 if (gCamCapability[mCameraId]->filter_densities_count) {
9011 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
9012 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
9013 gCamCapability[mCameraId]->filter_densities_count);
9014 }
9015
9016 float default_focal_length = gCamCapability[mCameraId]->focal_length;
9017 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
9018
9019 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
9020 float default_focus_distance = 0;
9021 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
9022 }
9023
9024 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
9025 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
9026
9027 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9028 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9029
9030 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
9031 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
9032
9033 /* face detection (default to OFF) */
9034 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
9035 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
9036
9037 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
9038 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
9039
9040 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
9041 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
9042
9043 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9044 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9045
9046
9047 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9048 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
9049
9050 /* Exposure time(Update the Min Exposure Time)*/
9051 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
9052 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
9053
9054 /* frame duration */
9055 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
9056 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
9057
9058 /* sensitivity */
9059 static const int32_t default_sensitivity = 100;
9060 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
9061 static const int32_t default_isp_sensitivity =
9062 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9063 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
9064
9065 /*edge mode*/
9066 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
9067
9068 /*noise reduction mode*/
9069 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
9070
9071 /*shading mode*/
9072 settings.update(ANDROID_SHADING_MODE, &shading_mode, 1);
9073
9074 /*hot pixel mode*/
9075 settings.update(ANDROID_HOT_PIXEL_MODE, &hot_pixel_mode, 1);
9076
9077 /*color correction mode*/
9078 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
9079 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
9080
9081 /*transform matrix mode*/
9082 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
9083
9084 int32_t scaler_crop_region[4];
9085 scaler_crop_region[0] = 0;
9086 scaler_crop_region[1] = 0;
9087 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
9088 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
9089 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
9090
9091 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
9092 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
9093
9094 /*focus distance*/
9095 float focus_distance = 0.0;
9096 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
9097
9098 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
9099 /* Restrict default preview template to max 30 fps */
9100 float max_range = 0.0;
9101 float max_fixed_fps = 0.0;
9102 int32_t fps_range[2] = {0, 0};
9103 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
9104 i++) {
9105 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
9106 TEMPLATE_MAX_PREVIEW_FPS) {
9107 continue;
9108 }
9109 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
9110 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9111 if (type == CAMERA3_TEMPLATE_PREVIEW ||
9112 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
9113 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
9114 if (range > max_range) {
9115 fps_range[0] =
9116 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9117 fps_range[1] =
9118 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9119 max_range = range;
9120 }
9121 } else {
9122 if (range < 0.01 && max_fixed_fps <
9123 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
9124 fps_range[0] =
9125 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9126 fps_range[1] =
9127 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9128 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9129 }
9130 }
9131 }
9132 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
9133
9134 /*precapture trigger*/
9135 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
9136 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
9137
9138 /*af trigger*/
9139 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
9140 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
9141
9142 /* ae & af regions */
9143 int32_t active_region[] = {
9144 gCamCapability[mCameraId]->active_array_size.left,
9145 gCamCapability[mCameraId]->active_array_size.top,
9146 gCamCapability[mCameraId]->active_array_size.left +
9147 gCamCapability[mCameraId]->active_array_size.width,
9148 gCamCapability[mCameraId]->active_array_size.top +
9149 gCamCapability[mCameraId]->active_array_size.height,
9150 0};
9151 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
9152 sizeof(active_region) / sizeof(active_region[0]));
9153 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
9154 sizeof(active_region) / sizeof(active_region[0]));
9155
9156 /* black level lock */
9157 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9158 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
9159
9160 //special defaults for manual template
9161 if (type == CAMERA3_TEMPLATE_MANUAL) {
9162 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
9163 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
9164
9165 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
9166 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
9167
9168 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
9169 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
9170
9171 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
9172 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
9173
9174 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
9175 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
9176
9177 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
9178 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
9179 }
9180
9181
9182 /* TNR
9183 * We'll use this location to determine which modes TNR will be set.
9184 * We will enable TNR to be on if either of the Preview/Video stream requires TNR
9185 * This is not to be confused with linking on a per stream basis that decision
9186 * is still on per-session basis and will be handled as part of config stream
9187 */
9188 uint8_t tnr_enable = 0;
9189
9190 if (m_bTnrPreview || m_bTnrVideo) {
9191
9192 switch (type) {
9193 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9194 tnr_enable = 1;
9195 break;
9196
9197 default:
9198 tnr_enable = 0;
9199 break;
9200 }
9201
9202 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
9203 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
9204 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
9205
9206 LOGD("TNR:%d with process plate %d for template:%d",
9207 tnr_enable, tnr_process_type, type);
9208 }
9209
9210 //Update Link tags to default
9211 uint8_t sync_type = CAM_TYPE_STANDALONE;
9212 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
9213
9214 uint8_t is_main = 0; //this doesn't matter as app should overwrite
9215 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
9216
9217 uint8_t related_camera_id = mCameraId;
9218 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
9219
9220 /* CDS default */
9221 char prop[PROPERTY_VALUE_MAX];
9222 memset(prop, 0, sizeof(prop));
9223 property_get("persist.camera.CDS", prop, "Auto");
9224 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
9225 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
9226 if (CAM_CDS_MODE_MAX == cds_mode) {
9227 cds_mode = CAM_CDS_MODE_AUTO;
9228 }
9229
9230 /* Disabling CDS in templates which have TNR enabled*/
9231 if (tnr_enable)
9232 cds_mode = CAM_CDS_MODE_OFF;
9233
9234 int32_t mode = cds_mode;
9235 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
9236
9237 /* hybrid ae */
9238 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
9239
9240 mDefaultMetadata[type] = settings.release();
9241
9242 return mDefaultMetadata[type];
9243 }
9244
9245 /*===========================================================================
9246 * FUNCTION : setFrameParameters
9247 *
9248 * DESCRIPTION: set parameters per frame as requested in the metadata from
9249 * framework
9250 *
9251 * PARAMETERS :
9252 * @request : request that needs to be serviced
9253 * @streamsArray : Stream ID of all the requested streams
9254 * @blob_request: Whether this request is a blob request or not
9255 *
9256 * RETURN : success: NO_ERROR
9257 * failure:
9258 *==========================================================================*/
setFrameParameters(camera3_capture_request_t * request,cam_stream_ID_t streamsArray,int blob_request,uint32_t snapshotStreamId)9259 int QCamera3HardwareInterface::setFrameParameters(
9260 camera3_capture_request_t *request,
9261 cam_stream_ID_t streamsArray,
9262 int blob_request,
9263 uint32_t snapshotStreamId)
9264 {
9265 /*translate from camera_metadata_t type to parm_type_t*/
9266 int rc = 0;
9267 int32_t hal_version = CAM_HAL_V3;
9268
9269 clear_metadata_buffer(mParameters);
9270 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
9271 LOGE("Failed to set hal version in the parameters");
9272 return BAD_VALUE;
9273 }
9274
9275 /*we need to update the frame number in the parameters*/
9276 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
9277 request->frame_number)) {
9278 LOGE("Failed to set the frame number in the parameters");
9279 return BAD_VALUE;
9280 }
9281
9282 /* Update stream id of all the requested buffers */
9283 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
9284 LOGE("Failed to set stream type mask in the parameters");
9285 return BAD_VALUE;
9286 }
9287
9288 if (mUpdateDebugLevel) {
9289 uint32_t dummyDebugLevel = 0;
9290 /* The value of dummyDebugLevel is irrelavent. On
9291 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
9292 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
9293 dummyDebugLevel)) {
9294 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
9295 return BAD_VALUE;
9296 }
9297 mUpdateDebugLevel = false;
9298 }
9299
9300 if(request->settings != NULL){
9301 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
9302 if (blob_request)
9303 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
9304 }
9305
9306 return rc;
9307 }
9308
9309 /*===========================================================================
9310 * FUNCTION : setReprocParameters
9311 *
9312 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
9313 * return it.
9314 *
9315 * PARAMETERS :
9316 * @request : request that needs to be serviced
9317 *
9318 * RETURN : success: NO_ERROR
9319 * failure:
9320 *==========================================================================*/
setReprocParameters(camera3_capture_request_t * request,metadata_buffer_t * reprocParam,uint32_t snapshotStreamId)9321 int32_t QCamera3HardwareInterface::setReprocParameters(
9322 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
9323 uint32_t snapshotStreamId)
9324 {
9325 /*translate from camera_metadata_t type to parm_type_t*/
9326 int rc = 0;
9327
9328 if (NULL == request->settings){
9329 LOGE("Reprocess settings cannot be NULL");
9330 return BAD_VALUE;
9331 }
9332
9333 if (NULL == reprocParam) {
9334 LOGE("Invalid reprocessing metadata buffer");
9335 return BAD_VALUE;
9336 }
9337 clear_metadata_buffer(reprocParam);
9338
9339 /*we need to update the frame number in the parameters*/
9340 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
9341 request->frame_number)) {
9342 LOGE("Failed to set the frame number in the parameters");
9343 return BAD_VALUE;
9344 }
9345
9346 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
9347 if (rc < 0) {
9348 LOGE("Failed to translate reproc request");
9349 return rc;
9350 }
9351
9352 CameraMetadata frame_settings;
9353 frame_settings = request->settings;
9354 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
9355 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
9356 int32_t *crop_count =
9357 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
9358 int32_t *crop_data =
9359 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
9360 int32_t *roi_map =
9361 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
9362 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
9363 cam_crop_data_t crop_meta;
9364 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
9365 crop_meta.num_of_streams = 1;
9366 crop_meta.crop_info[0].crop.left = crop_data[0];
9367 crop_meta.crop_info[0].crop.top = crop_data[1];
9368 crop_meta.crop_info[0].crop.width = crop_data[2];
9369 crop_meta.crop_info[0].crop.height = crop_data[3];
9370
9371 crop_meta.crop_info[0].roi_map.left =
9372 roi_map[0];
9373 crop_meta.crop_info[0].roi_map.top =
9374 roi_map[1];
9375 crop_meta.crop_info[0].roi_map.width =
9376 roi_map[2];
9377 crop_meta.crop_info[0].roi_map.height =
9378 roi_map[3];
9379
9380 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
9381 rc = BAD_VALUE;
9382 }
9383 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
9384 request->input_buffer->stream,
9385 crop_meta.crop_info[0].crop.left,
9386 crop_meta.crop_info[0].crop.top,
9387 crop_meta.crop_info[0].crop.width,
9388 crop_meta.crop_info[0].crop.height);
9389 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
9390 request->input_buffer->stream,
9391 crop_meta.crop_info[0].roi_map.left,
9392 crop_meta.crop_info[0].roi_map.top,
9393 crop_meta.crop_info[0].roi_map.width,
9394 crop_meta.crop_info[0].roi_map.height);
9395 } else {
9396 LOGE("Invalid reprocess crop count %d!", *crop_count);
9397 }
9398 } else {
9399 LOGE("No crop data from matching output stream");
9400 }
9401
9402 /* These settings are not needed for regular requests so handle them specially for
9403 reprocess requests; information needed for EXIF tags */
9404 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9405 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9406 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9407 if (NAME_NOT_FOUND != val) {
9408 uint32_t flashMode = (uint32_t)val;
9409 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
9410 rc = BAD_VALUE;
9411 }
9412 } else {
9413 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
9414 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9415 }
9416 } else {
9417 LOGH("No flash mode in reprocess settings");
9418 }
9419
9420 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
9421 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
9422 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
9423 rc = BAD_VALUE;
9424 }
9425 } else {
9426 LOGH("No flash state in reprocess settings");
9427 }
9428
9429 return rc;
9430 }
9431
9432 /*===========================================================================
9433 * FUNCTION : isEISCropInSnapshotNeeded
9434 *
9435 * DESCRIPTION: In case EIS is active, check whether additional crop is needed
9436 * to avoid FOV jumps in snapshot streams.
9437 *
9438 * PARAMETERS : @metadata: Current request settings.
9439 *
9440 * RETURN : True in case EIS crop is needed, False otherwise.
9441 *==========================================================================*/
isEISCropInSnapshotNeeded(const CameraMetadata & metadata) const9442 bool QCamera3HardwareInterface::isEISCropInSnapshotNeeded(const CameraMetadata &metadata) const
9443 {
9444 if (metadata.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
9445 uint8_t vstabMode =
9446 metadata.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
9447 if (vstabMode == ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON) {
9448 if ((mLastEISCropInfo.delta_x != 0) || (mLastEISCropInfo.delta_y != 0) ||
9449 (mLastEISCropInfo.delta_width != 0) || (mLastEISCropInfo.delta_height != 0)) {
9450 return true;
9451 }
9452 }
9453 }
9454
9455 return false;
9456 }
9457
9458 /*===========================================================================
9459 * FUNCTION : isCropValid
9460 *
9461 * DESCRIPTION: Crop sanity checks.
9462 *
9463 * PARAMETERS : @startX: Horizontal crop offset.
9464 * @startY: Vertical crop offset.
9465 * @width: Crop width.
9466 * @height: Crop height.
9467 * @maxWidth: Horizontal maximum size.
9468 * @maxHeight: Vertical maximum size.
9469 *
9470 * RETURN : True in case crop is valid, False otherwise.
9471 *==========================================================================*/
isCropValid(int32_t startX,int32_t startY,int32_t width,int32_t height,int32_t maxWidth,int32_t maxHeight) const9472 bool QCamera3HardwareInterface::isCropValid(int32_t startX, int32_t startY, int32_t width,
9473 int32_t height, int32_t maxWidth, int32_t maxHeight) const
9474 {
9475 if ((startX < 0) || (startY < 0) || (startX >= maxWidth) || (startY >= maxHeight)) {
9476 LOGE("Crop offset is invalid: %dx%d", startX, startY);
9477 return false;
9478 }
9479
9480 if ((width < 0) || (height < 0) || (width >= maxWidth) || (height >= maxHeight)) {
9481 LOGE("Crop dimensions are invalid: %dx%d", width, height);
9482 return false;
9483 }
9484
9485 if (((startX + width) > maxWidth) || ((startY + height) > maxHeight)) {
9486 LOGE("Crop is out of bounds: %dx%d max %dx%d", startX + width, startY + height, maxWidth,
9487 maxHeight);
9488 return false;
9489 }
9490
9491 return true;
9492 }
9493
9494 /*===========================================================================
9495 * FUNCTION : saveRequestSettings
9496 *
9497 * DESCRIPTION: Add any settings that might have changed to the request settings
9498 * and save the settings to be applied on the frame
9499 *
9500 * PARAMETERS :
9501 * @jpegMetadata : the extracted and/or modified jpeg metadata
9502 * @request : request with initial settings
9503 *
9504 * RETURN :
9505 * camera_metadata_t* : pointer to the saved request settings
9506 *==========================================================================*/
saveRequestSettings(const CameraMetadata & jpegMetadata,camera3_capture_request_t * request)9507 camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
9508 const CameraMetadata &jpegMetadata,
9509 camera3_capture_request_t *request)
9510 {
9511 camera_metadata_t *resultMetadata;
9512 CameraMetadata camMetadata;
9513 camMetadata = request->settings;
9514
9515 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9516 int32_t thumbnail_size[2];
9517 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9518 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9519 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
9520 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
9521 }
9522
9523 resultMetadata = camMetadata.release();
9524 return resultMetadata;
9525 }
9526
9527 /*===========================================================================
9528 * FUNCTION : setHalFpsRange
9529 *
9530 * DESCRIPTION: set FPS range parameter
9531 *
9532 *
9533 * PARAMETERS :
9534 * @settings : Metadata from framework
9535 * @hal_metadata: Metadata buffer
9536 *
9537 *
9538 * RETURN : success: NO_ERROR
9539 * failure:
9540 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): assumes ANDROID_CONTROL_AE_TARGET_FPS_RANGE is present in
    // the request settings -- verify that all callers check exists() first.
    fps_range.min_fps = (float)
        settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
        settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // By default the sensor/video fps range tracks the AE target range; the
    // constrained-HFR path below overrides this.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Session state: zero disables frame batching; recomputed below for HFR.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // In constrained high-speed mode the sensor runs fixed at max fps
        // (see the table above): pin both min and video_min to video_max.
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch size brings the effective preview rate down to
                // PREVIEW_FPS_FOR_HFR, capped at the pipeline maximum.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
            }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

        }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
9634
9635 /*===========================================================================
9636 * FUNCTION : translateToHalMetadata
9637 *
9638 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
9639 *
9640 *
9641 * PARAMETERS :
9642 * @request : request sent from framework
9643 *
9644 *
9645 * RETURN : success: NO_ERROR
9646 * failure:
9647 *==========================================================================*/
translateToHalMetadata(const camera3_capture_request_t * request,metadata_buffer_t * hal_metadata,uint32_t snapshotStreamId)9648 int QCamera3HardwareInterface::translateToHalMetadata
9649 (const camera3_capture_request_t *request,
9650 metadata_buffer_t *hal_metadata,
9651 uint32_t snapshotStreamId)
9652 {
9653 int rc = 0;
9654 CameraMetadata frame_settings;
9655 frame_settings = request->settings;
9656
9657 /* Do not change the order of the following list unless you know what you are
9658 * doing.
9659 * The order is laid out in such a way that parameters in the front of the table
9660 * may be used to override the parameters later in the table. Examples are:
9661 * 1. META_MODE should precede AEC/AWB/AF MODE
9662 * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
9663 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
9664 * 4. Any mode should precede it's corresponding settings
9665 */
9666 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
9667 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
9668 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
9669 rc = BAD_VALUE;
9670 }
9671 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
9672 if (rc != NO_ERROR) {
9673 LOGE("extractSceneMode failed");
9674 }
9675 }
9676
9677 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9678 uint8_t fwk_aeMode =
9679 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9680 uint8_t aeMode;
9681 int32_t redeye;
9682
9683 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
9684 aeMode = CAM_AE_MODE_OFF;
9685 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
9686 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
9687 } else {
9688 aeMode = CAM_AE_MODE_ON;
9689 }
9690 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
9691 redeye = 1;
9692 } else {
9693 redeye = 0;
9694 }
9695
9696 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
9697 fwk_aeMode);
9698 if (NAME_NOT_FOUND != val) {
9699 int32_t flashMode = (int32_t)val;
9700 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
9701 }
9702
9703 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
9704 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
9705 rc = BAD_VALUE;
9706 }
9707 }
9708
9709 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
9710 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
9711 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9712 fwk_whiteLevel);
9713 if (NAME_NOT_FOUND != val) {
9714 uint8_t whiteLevel = (uint8_t)val;
9715 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
9716 rc = BAD_VALUE;
9717 }
9718 }
9719 }
9720
9721 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
9722 uint8_t fwk_cacMode =
9723 frame_settings.find(
9724 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
9725 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
9726 fwk_cacMode);
9727 if (NAME_NOT_FOUND != val) {
9728 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
9729 bool entryAvailable = FALSE;
9730 // Check whether Frameworks set CAC mode is supported in device or not
9731 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9732 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
9733 entryAvailable = TRUE;
9734 break;
9735 }
9736 }
9737 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
9738 // If entry not found then set the device supported mode instead of frameworks mode i.e,
9739 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
9740 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
9741 if (entryAvailable == FALSE) {
9742 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9743 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9744 } else {
9745 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9746 // High is not supported and so set the FAST as spec say's underlying
9747 // device implementation can be the same for both modes.
9748 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
9749 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9750 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
9751 // in order to avoid the fps drop due to high quality
9752 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9753 } else {
9754 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9755 }
9756 }
9757 }
9758 LOGD("Final cacMode is %d", cacMode);
9759 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
9760 rc = BAD_VALUE;
9761 }
9762 } else {
9763 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
9764 }
9765 }
9766
9767 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
9768 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
9769 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9770 fwk_focusMode);
9771 if (NAME_NOT_FOUND != val) {
9772 uint8_t focusMode = (uint8_t)val;
9773 LOGD("set focus mode %d", focusMode);
9774 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
9775 rc = BAD_VALUE;
9776 }
9777 }
9778 }
9779
9780 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
9781 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
9782 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
9783 focalDistance)) {
9784 rc = BAD_VALUE;
9785 }
9786 }
9787
9788 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
9789 uint8_t fwk_antibandingMode =
9790 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
9791 int val = lookupHalName(ANTIBANDING_MODES_MAP,
9792 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
9793 if (NAME_NOT_FOUND != val) {
9794 uint32_t hal_antibandingMode = (uint32_t)val;
9795 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
9796 if (m60HzZone) {
9797 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
9798 } else {
9799 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
9800 }
9801 }
9802 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
9803 hal_antibandingMode)) {
9804 rc = BAD_VALUE;
9805 }
9806 }
9807 }
9808
9809 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
9810 int32_t expCompensation = frame_settings.find(
9811 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
9812 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
9813 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
9814 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
9815 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
9816 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
9817 expCompensation)) {
9818 rc = BAD_VALUE;
9819 }
9820 }
9821
9822 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
9823 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
9824 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
9825 rc = BAD_VALUE;
9826 }
9827 }
9828 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
9829 rc = setHalFpsRange(frame_settings, hal_metadata);
9830 if (rc != NO_ERROR) {
9831 LOGE("setHalFpsRange failed");
9832 }
9833 }
9834
9835 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
9836 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
9837 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
9838 rc = BAD_VALUE;
9839 }
9840 }
9841
9842 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
9843 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
9844 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9845 fwk_effectMode);
9846 if (NAME_NOT_FOUND != val) {
9847 uint8_t effectMode = (uint8_t)val;
9848 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
9849 rc = BAD_VALUE;
9850 }
9851 }
9852 }
9853
9854 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
9855 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
9856 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
9857 colorCorrectMode)) {
9858 rc = BAD_VALUE;
9859 }
9860 }
9861
9862 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
9863 cam_color_correct_gains_t colorCorrectGains;
9864 for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
9865 colorCorrectGains.gains[i] =
9866 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
9867 }
9868 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
9869 colorCorrectGains)) {
9870 rc = BAD_VALUE;
9871 }
9872 }
9873
9874 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
9875 cam_color_correct_matrix_t colorCorrectTransform;
9876 cam_rational_type_t transform_elem;
9877 size_t num = 0;
9878 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
9879 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
9880 transform_elem.numerator =
9881 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
9882 transform_elem.denominator =
9883 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
9884 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
9885 num++;
9886 }
9887 }
9888 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
9889 colorCorrectTransform)) {
9890 rc = BAD_VALUE;
9891 }
9892 }
9893
9894 cam_trigger_t aecTrigger;
9895 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
9896 aecTrigger.trigger_id = -1;
9897 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
9898 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
9899 aecTrigger.trigger =
9900 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
9901 aecTrigger.trigger_id =
9902 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
9903 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
9904 aecTrigger)) {
9905 rc = BAD_VALUE;
9906 }
9907 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
9908 aecTrigger.trigger, aecTrigger.trigger_id);
9909 }
9910
9911 /*af_trigger must come with a trigger id*/
9912 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
9913 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
9914 cam_trigger_t af_trigger;
9915 af_trigger.trigger =
9916 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
9917 af_trigger.trigger_id =
9918 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
9919 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
9920 rc = BAD_VALUE;
9921 }
9922 LOGD("AfTrigger: %d AfTriggerID: %d",
9923 af_trigger.trigger, af_trigger.trigger_id);
9924 }
9925
9926 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
9927 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
9928 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
9929 rc = BAD_VALUE;
9930 }
9931 }
9932 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
9933 cam_edge_application_t edge_application;
9934 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
9935 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
9936 edge_application.sharpness = 0;
9937 } else {
9938 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
9939 }
9940 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
9941 rc = BAD_VALUE;
9942 }
9943 }
9944
9945 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9946 int32_t respectFlashMode = 1;
9947 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9948 uint8_t fwk_aeMode =
9949 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9950 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
9951 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
9952 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
9953 respectFlashMode = 0;
9954 LOGH("AE Mode controls flash, ignore android.flash.mode");
9955 }
9956 }
9957 if (respectFlashMode) {
9958 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9959 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9960 LOGH("flash mode after mapping %d", val);
9961 // To check: CAM_INTF_META_FLASH_MODE usage
9962 if (NAME_NOT_FOUND != val) {
9963 uint8_t flashMode = (uint8_t)val;
9964 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
9965 rc = BAD_VALUE;
9966 }
9967 }
9968 }
9969 }
9970
9971 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
9972 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
9973 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
9974 rc = BAD_VALUE;
9975 }
9976 }
9977
9978 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
9979 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
9980 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
9981 flashFiringTime)) {
9982 rc = BAD_VALUE;
9983 }
9984 }
9985
9986 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
9987 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
9988 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
9989 hotPixelMode)) {
9990 rc = BAD_VALUE;
9991 }
9992 }
9993
9994 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
9995 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
9996 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
9997 lensAperture)) {
9998 rc = BAD_VALUE;
9999 }
10000 }
10001
10002 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
10003 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
10004 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
10005 filterDensity)) {
10006 rc = BAD_VALUE;
10007 }
10008 }
10009
10010 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
10011 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
10012 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
10013 focalLength)) {
10014 rc = BAD_VALUE;
10015 }
10016 }
10017
10018 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
10019 uint8_t optStabMode =
10020 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
10021 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
10022 optStabMode)) {
10023 rc = BAD_VALUE;
10024 }
10025 }
10026
10027 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
10028 uint8_t videoStabMode =
10029 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
10030 LOGD("videoStabMode from APP = %d", videoStabMode);
10031 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
10032 videoStabMode)) {
10033 rc = BAD_VALUE;
10034 }
10035 }
10036
10037
10038 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
10039 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
10040 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
10041 noiseRedMode)) {
10042 rc = BAD_VALUE;
10043 }
10044 }
10045
10046 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
10047 float reprocessEffectiveExposureFactor =
10048 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
10049 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
10050 reprocessEffectiveExposureFactor)) {
10051 rc = BAD_VALUE;
10052 }
10053 }
10054
10055 cam_crop_region_t scalerCropRegion;
10056 bool scalerCropSet = false;
10057 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
10058 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
10059 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
10060 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
10061 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
10062
10063 // Map coordinate system from active array to sensor output.
10064 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
10065 scalerCropRegion.width, scalerCropRegion.height);
10066
10067 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
10068 scalerCropRegion)) {
10069 rc = BAD_VALUE;
10070 }
10071 scalerCropSet = true;
10072 }
10073
10074 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
10075 int64_t sensorExpTime =
10076 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
10077 LOGD("setting sensorExpTime %lld", sensorExpTime);
10078 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
10079 sensorExpTime)) {
10080 rc = BAD_VALUE;
10081 }
10082 }
10083
10084 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
10085 int64_t sensorFrameDuration =
10086 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
10087 int64_t minFrameDuration = getMinFrameDuration(request);
10088 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
10089 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
10090 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
10091 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
10092 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
10093 sensorFrameDuration)) {
10094 rc = BAD_VALUE;
10095 }
10096 }
10097
10098 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
10099 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
10100 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
10101 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
10102 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
10103 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
10104 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
10105 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
10106 sensorSensitivity)) {
10107 rc = BAD_VALUE;
10108 }
10109 }
10110
10111 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
10112 int32_t ispSensitivity =
10113 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
10114 if (ispSensitivity <
10115 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
10116 ispSensitivity =
10117 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10118 LOGD("clamp ispSensitivity to %d", ispSensitivity);
10119 }
10120 if (ispSensitivity >
10121 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
10122 ispSensitivity =
10123 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
10124 LOGD("clamp ispSensitivity to %d", ispSensitivity);
10125 }
10126 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
10127 ispSensitivity)) {
10128 rc = BAD_VALUE;
10129 }
10130 }
10131
10132 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
10133 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
10134 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
10135 rc = BAD_VALUE;
10136 }
10137 }
10138
10139 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
10140 uint8_t fwk_facedetectMode =
10141 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
10142
10143 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
10144 fwk_facedetectMode);
10145
10146 if (NAME_NOT_FOUND != val) {
10147 uint8_t facedetectMode = (uint8_t)val;
10148 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
10149 facedetectMode)) {
10150 rc = BAD_VALUE;
10151 }
10152 }
10153 }
10154
10155 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
10156 uint8_t histogramMode =
10157 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
10158 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
10159 histogramMode)) {
10160 rc = BAD_VALUE;
10161 }
10162 }
10163
10164 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
10165 uint8_t sharpnessMapMode =
10166 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
10167 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
10168 sharpnessMapMode)) {
10169 rc = BAD_VALUE;
10170 }
10171 }
10172
10173 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
10174 uint8_t tonemapMode =
10175 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
10176 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
10177 rc = BAD_VALUE;
10178 }
10179 }
10180 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
10181 /*All tonemap channels will have the same number of points*/
10182 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
10183 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
10184 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
10185 cam_rgb_tonemap_curves tonemapCurves;
10186 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
10187 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
10188 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
10189 tonemapCurves.tonemap_points_cnt,
10190 CAM_MAX_TONEMAP_CURVE_SIZE);
10191 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
10192 }
10193
10194 /* ch0 = G*/
10195 size_t point = 0;
10196 cam_tonemap_curve_t tonemapCurveGreen;
10197 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10198 for (size_t j = 0; j < 2; j++) {
10199 tonemapCurveGreen.tonemap_points[i][j] =
10200 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
10201 point++;
10202 }
10203 }
10204 tonemapCurves.curves[0] = tonemapCurveGreen;
10205
10206 /* ch 1 = B */
10207 point = 0;
10208 cam_tonemap_curve_t tonemapCurveBlue;
10209 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10210 for (size_t j = 0; j < 2; j++) {
10211 tonemapCurveBlue.tonemap_points[i][j] =
10212 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
10213 point++;
10214 }
10215 }
10216 tonemapCurves.curves[1] = tonemapCurveBlue;
10217
10218 /* ch 2 = R */
10219 point = 0;
10220 cam_tonemap_curve_t tonemapCurveRed;
10221 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10222 for (size_t j = 0; j < 2; j++) {
10223 tonemapCurveRed.tonemap_points[i][j] =
10224 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
10225 point++;
10226 }
10227 }
10228 tonemapCurves.curves[2] = tonemapCurveRed;
10229
10230 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
10231 tonemapCurves)) {
10232 rc = BAD_VALUE;
10233 }
10234 }
10235
10236 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
10237 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
10238 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
10239 captureIntent)) {
10240 rc = BAD_VALUE;
10241 }
10242 }
10243
10244 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
10245 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
10246 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
10247 blackLevelLock)) {
10248 rc = BAD_VALUE;
10249 }
10250 }
10251
10252 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
10253 uint8_t lensShadingMapMode =
10254 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
10255 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
10256 lensShadingMapMode)) {
10257 rc = BAD_VALUE;
10258 }
10259 }
10260
10261 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
10262 cam_area_t roi;
10263 bool reset = true;
10264 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
10265
10266 // Map coordinate system from active array to sensor output.
10267 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
10268 roi.rect.height);
10269
10270 if (scalerCropSet) {
10271 reset = resetIfNeededROI(&roi, &scalerCropRegion);
10272 }
10273 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
10274 rc = BAD_VALUE;
10275 }
10276 }
10277
10278 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
10279 cam_area_t roi;
10280 bool reset = true;
10281 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
10282
10283 // Map coordinate system from active array to sensor output.
10284 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
10285 roi.rect.height);
10286
10287 if (scalerCropSet) {
10288 reset = resetIfNeededROI(&roi, &scalerCropRegion);
10289 }
10290 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
10291 rc = BAD_VALUE;
10292 }
10293 }
10294
10295 // CDS for non-HFR non-video mode
10296 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
10297 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
10298 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
10299 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
10300 LOGE("Invalid CDS mode %d!", *fwk_cds);
10301 } else {
10302 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10303 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
10304 rc = BAD_VALUE;
10305 }
10306 }
10307 }
10308
10309 // TNR
10310 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
10311 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
10312 uint8_t b_TnrRequested = 0;
10313 cam_denoise_param_t tnr;
10314 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
10315 tnr.process_plates =
10316 (cam_denoise_process_type_t)frame_settings.find(
10317 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
10318 b_TnrRequested = tnr.denoise_enable;
10319 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
10320 rc = BAD_VALUE;
10321 }
10322 }
10323
10324 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
10325 int32_t fwk_testPatternMode =
10326 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
10327 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
10328 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
10329
10330 if (NAME_NOT_FOUND != testPatternMode) {
10331 cam_test_pattern_data_t testPatternData;
10332 memset(&testPatternData, 0, sizeof(testPatternData));
10333 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
10334 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
10335 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
10336 int32_t *fwk_testPatternData =
10337 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
10338 testPatternData.r = fwk_testPatternData[0];
10339 testPatternData.b = fwk_testPatternData[3];
10340 switch (gCamCapability[mCameraId]->color_arrangement) {
10341 case CAM_FILTER_ARRANGEMENT_RGGB:
10342 case CAM_FILTER_ARRANGEMENT_GRBG:
10343 testPatternData.gr = fwk_testPatternData[1];
10344 testPatternData.gb = fwk_testPatternData[2];
10345 break;
10346 case CAM_FILTER_ARRANGEMENT_GBRG:
10347 case CAM_FILTER_ARRANGEMENT_BGGR:
10348 testPatternData.gr = fwk_testPatternData[2];
10349 testPatternData.gb = fwk_testPatternData[1];
10350 break;
10351 default:
10352 LOGE("color arrangement %d is not supported",
10353 gCamCapability[mCameraId]->color_arrangement);
10354 break;
10355 }
10356 }
10357 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
10358 testPatternData)) {
10359 rc = BAD_VALUE;
10360 }
10361 } else {
10362 LOGE("Invalid framework sensor test pattern mode %d",
10363 fwk_testPatternMode);
10364 }
10365 }
10366
10367 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
10368 size_t count = 0;
10369 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
10370 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
10371 gps_coords.data.d, gps_coords.count, count);
10372 if (gps_coords.count != count) {
10373 rc = BAD_VALUE;
10374 }
10375 }
10376
10377 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
10378 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
10379 size_t count = 0;
10380 const char *gps_methods_src = (const char *)
10381 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
10382 memset(gps_methods, '\0', sizeof(gps_methods));
10383 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
10384 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
10385 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
10386 if (GPS_PROCESSING_METHOD_SIZE != count) {
10387 rc = BAD_VALUE;
10388 }
10389 }
10390
10391 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
10392 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
10393 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
10394 gps_timestamp)) {
10395 rc = BAD_VALUE;
10396 }
10397 }
10398
10399 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
10400 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
10401 cam_rotation_info_t rotation_info;
10402 if (orientation == 0) {
10403 rotation_info.rotation = ROTATE_0;
10404 } else if (orientation == 90) {
10405 rotation_info.rotation = ROTATE_90;
10406 } else if (orientation == 180) {
10407 rotation_info.rotation = ROTATE_180;
10408 } else if (orientation == 270) {
10409 rotation_info.rotation = ROTATE_270;
10410 }
10411 rotation_info.device_rotation = ROTATE_0;
10412 rotation_info.streamId = snapshotStreamId;
10413 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
10414 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
10415 rc = BAD_VALUE;
10416 }
10417 }
10418
10419 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
10420 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
10421 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
10422 rc = BAD_VALUE;
10423 }
10424 }
10425
10426 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
10427 uint32_t thumb_quality = (uint32_t)
10428 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
10429 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
10430 thumb_quality)) {
10431 rc = BAD_VALUE;
10432 }
10433 }
10434
10435 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
10436 cam_dimension_t dim;
10437 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
10438 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
10439 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
10440 rc = BAD_VALUE;
10441 }
10442 }
10443
10444 // Internal metadata
10445 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
10446 size_t count = 0;
10447 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
10448 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
10449 privatedata.data.i32, privatedata.count, count);
10450 if (privatedata.count != count) {
10451 rc = BAD_VALUE;
10452 }
10453 }
10454
10455 // EV step
10456 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
10457 gCamCapability[mCameraId]->exp_compensation_step)) {
10458 rc = BAD_VALUE;
10459 }
10460
10461 // CDS info
10462 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
10463 cam_cds_data_t *cdsData = (cam_cds_data_t *)
10464 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
10465
10466 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10467 CAM_INTF_META_CDS_DATA, *cdsData)) {
10468 rc = BAD_VALUE;
10469 }
10470 }
10471
10472 // Hybrid AE
10473 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
10474 uint8_t *hybrid_ae = (uint8_t *)
10475 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
10476
10477 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10478 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
10479 rc = BAD_VALUE;
10480 }
10481 }
10482
10483 return rc;
10484 }
10485
10486 /*===========================================================================
10487 * FUNCTION : captureResultCb
10488 *
10489 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
10490 *
 * PARAMETERS :
 *   @metadata      : metadata super-buffer from mm-camera-interface; NULL for
 *                    pure buffer callbacks
 *   @buffer        : gralloc buffer to be returned to frameworks; NULL if this
 *                    is a metadata callback
 *   @frame_number  : frame number this callback corresponds to
 *   @isInputBuffer : true if the buffer is an input (reprocess) buffer
 *   @userdata      : opaque pointer to the QCamera3HardwareInterface instance
10495 *
10496 * RETURN : NONE
10497 *==========================================================================*/
captureResultCb(mm_camera_super_buf_t * metadata,camera3_stream_buffer_t * buffer,uint32_t frame_number,bool isInputBuffer,void * userdata)10498 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
10499 camera3_stream_buffer_t *buffer,
10500 uint32_t frame_number, bool isInputBuffer, void *userdata)
10501 {
10502 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
10503 if (hw == NULL) {
10504 LOGE("Invalid hw %p", hw);
10505 return;
10506 }
10507
10508 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
10509 return;
10510 }
10511
10512 /*===========================================================================
10513 * FUNCTION : setBufferErrorStatus
10514 *
10515 * DESCRIPTION: Callback handler for channels to report any buffer errors
10516 *
10517 * PARAMETERS :
10518 * @ch : Channel on which buffer error is reported from
10519 * @frame_number : frame number on which buffer error is reported on
10520 * @buffer_status : buffer error status
10521 * @userdata: userdata
10522 *
10523 * RETURN : NONE
10524 *==========================================================================*/
setBufferErrorStatus(QCamera3Channel * ch,uint32_t frame_number,camera3_buffer_status_t err,void * userdata)10525 void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
10526 uint32_t frame_number, camera3_buffer_status_t err,
10527 void *userdata)
10528 {
10529 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
10530 if (hw == NULL) {
10531 LOGE("Invalid hw %p", hw);
10532 return;
10533 }
10534
10535 hw->setBufferErrorStatus(ch, frame_number, err);
10536 return;
10537 }
10538
/* Marks every pending buffer belonging to the given channel and frame as
 * errored so the eventual capture result carries the failure status.
 * Runs under mMutex since the pending-buffers map is shared with the
 * request/result paths. */
void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
        uint32_t frameNumber, camera3_buffer_status_t err)
{
    LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
    pthread_mutex_lock(&mMutex);

    // NOTE(review): the 'err' argument is only logged above; the stored
    // status is hard-coded to CAMERA3_BUFFER_STATUS_ERROR regardless of
    // its value — confirm that is intentional.
    for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
        if (req.frame_number != frameNumber)
            continue;
        for (auto& k : req.mPendingBufferList) {
            // Match buffers by the channel that owns the stream.
            if(k.stream->priv == ch) {
                k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
            }
        }
    }

    pthread_mutex_unlock(&mMutex);
    return;
}
10558 /*===========================================================================
10559 * FUNCTION : initialize
10560 *
10561 * DESCRIPTION: Pass framework callback pointers to HAL
10562 *
10563 * PARAMETERS :
10564 *
10565 *
10566 * RETURN : Success : 0
10567 * Failure: -ENODEV
10568 *==========================================================================*/
10569
initialize(const struct camera3_device * device,const camera3_callback_ops_t * callback_ops)10570 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
10571 const camera3_callback_ops_t *callback_ops)
10572 {
10573 LOGD("E");
10574 QCamera3HardwareInterface *hw =
10575 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10576 if (!hw) {
10577 LOGE("NULL camera device");
10578 return -ENODEV;
10579 }
10580
10581 int rc = hw->initialize(callback_ops);
10582 LOGD("X");
10583 return rc;
10584 }
10585
10586 /*===========================================================================
10587 * FUNCTION : configure_streams
10588 *
10589 * DESCRIPTION:
10590 *
10591 * PARAMETERS :
10592 *
10593 *
10594 * RETURN : Success: 0
10595 * Failure: -EINVAL (if stream configuration is invalid)
10596 * -ENODEV (fatal error)
10597 *==========================================================================*/
10598
configure_streams(const struct camera3_device * device,camera3_stream_configuration_t * stream_list)10599 int QCamera3HardwareInterface::configure_streams(
10600 const struct camera3_device *device,
10601 camera3_stream_configuration_t *stream_list)
10602 {
10603 LOGD("E");
10604 QCamera3HardwareInterface *hw =
10605 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10606 if (!hw) {
10607 LOGE("NULL camera device");
10608 return -ENODEV;
10609 }
10610 int rc = hw->configureStreams(stream_list);
10611 LOGD("X");
10612 return rc;
10613 }
10614
10615 /*===========================================================================
10616 * FUNCTION : construct_default_request_settings
10617 *
10618 * DESCRIPTION: Configure a settings buffer to meet the required use case
10619 *
10620 * PARAMETERS :
10621 *
10622 *
10623 * RETURN : Success: Return valid metadata
10624 * Failure: Return NULL
10625 *==========================================================================*/
10626 const camera_metadata_t* QCamera3HardwareInterface::
construct_default_request_settings(const struct camera3_device * device,int type)10627 construct_default_request_settings(const struct camera3_device *device,
10628 int type)
10629 {
10630
10631 LOGD("E");
10632 camera_metadata_t* fwk_metadata = NULL;
10633 QCamera3HardwareInterface *hw =
10634 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10635 if (!hw) {
10636 LOGE("NULL camera device");
10637 return NULL;
10638 }
10639
10640 fwk_metadata = hw->translateCapabilityToMetadata(type);
10641
10642 LOGD("X");
10643 return fwk_metadata;
10644 }
10645
10646 /*===========================================================================
10647 * FUNCTION : process_capture_request
10648 *
10649 * DESCRIPTION:
10650 *
10651 * PARAMETERS :
10652 *
10653 *
10654 * RETURN :
10655 *==========================================================================*/
process_capture_request(const struct camera3_device * device,camera3_capture_request_t * request)10656 int QCamera3HardwareInterface::process_capture_request(
10657 const struct camera3_device *device,
10658 camera3_capture_request_t *request)
10659 {
10660 LOGD("E");
10661 QCamera3HardwareInterface *hw =
10662 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10663 if (!hw) {
10664 LOGE("NULL camera device");
10665 return -EINVAL;
10666 }
10667
10668 int rc = hw->processCaptureRequest(request);
10669 LOGD("X");
10670 return rc;
10671 }
10672
10673 /*===========================================================================
10674 * FUNCTION : dump
10675 *
10676 * DESCRIPTION:
10677 *
10678 * PARAMETERS :
10679 *
10680 *
10681 * RETURN :
10682 *==========================================================================*/
10683
dump(const struct camera3_device * device,int fd)10684 void QCamera3HardwareInterface::dump(
10685 const struct camera3_device *device, int fd)
10686 {
10687 /* Log level property is read when "adb shell dumpsys media.camera" is
10688 called so that the log level can be controlled without restarting
10689 the media server */
10690 getLogLevel();
10691
10692 LOGD("E");
10693 QCamera3HardwareInterface *hw =
10694 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10695 if (!hw) {
10696 LOGE("NULL camera device");
10697 return;
10698 }
10699
10700 hw->dump(fd);
10701 LOGD("X");
10702 return;
10703 }
10704
10705 /*===========================================================================
10706 * FUNCTION : flush
10707 *
10708 * DESCRIPTION:
10709 *
10710 * PARAMETERS :
10711 *
10712 *
10713 * RETURN :
10714 *==========================================================================*/
10715
/* Framework entry point (camera3_device_ops_t::flush): drains all
 * in-flight work. Only acts in STARTED state; any other state is a
 * no-op success, and ERROR state triggers the fatal-error path.
 * Returns 0 on success/no-op, -EINVAL for a stale device, -ENODEV
 * after a device error. */
int QCamera3HardwareInterface::flush(
    const struct camera3_device *device)
{
    int rc;
    LOGD("E");
    QCamera3HardwareInterface *hw =
        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
    if (!hw) {
        LOGE("NULL camera device");
        return -EINVAL;
    }

    pthread_mutex_lock(&hw->mMutex);
    // Validate current state
    switch (hw->mState) {
        case STARTED:
            /* valid state */
            break;

        case ERROR:
            // Must drop mMutex before handleCameraDeviceError(), which
            // takes its own locks / notifies the framework.
            pthread_mutex_unlock(&hw->mMutex);
            hw->handleCameraDeviceError();
            return -ENODEV;

        default:
            LOGI("Flush returned during state %d", hw->mState);
            pthread_mutex_unlock(&hw->mMutex);
            return 0;
    }
    // Unlock before the potentially long-running flush; the member
    // flush() manages its own synchronization.
    pthread_mutex_unlock(&hw->mMutex);

    rc = hw->flush(true /* restart channels */ );
    LOGD("X");
    return rc;
}
10751
10752 /*===========================================================================
10753 * FUNCTION : close_camera_device
10754 *
10755 * DESCRIPTION:
10756 *
10757 * PARAMETERS :
10758 *
10759 *
10760 * RETURN :
10761 *==========================================================================*/
close_camera_device(struct hw_device_t * device)10762 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
10763 {
10764 int ret = NO_ERROR;
10765 QCamera3HardwareInterface *hw =
10766 reinterpret_cast<QCamera3HardwareInterface *>(
10767 reinterpret_cast<camera3_device_t *>(device)->priv);
10768 if (!hw) {
10769 LOGE("NULL camera device");
10770 return BAD_VALUE;
10771 }
10772
10773 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
10774 delete hw;
10775 LOGI("[KPI Perf]: X");
10776 return ret;
10777 }
10778
10779 /*===========================================================================
10780 * FUNCTION : getWaveletDenoiseProcessPlate
10781 *
10782 * DESCRIPTION: query wavelet denoise process plate
10783 *
10784 * PARAMETERS : None
10785 *
 * RETURN     : WNR process plate value
10787 *==========================================================================*/
getWaveletDenoiseProcessPlate()10788 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
10789 {
10790 char prop[PROPERTY_VALUE_MAX];
10791 memset(prop, 0, sizeof(prop));
10792 property_get("persist.denoise.process.plates", prop, "0");
10793 int processPlate = atoi(prop);
10794 switch(processPlate) {
10795 case 0:
10796 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10797 case 1:
10798 return CAM_WAVELET_DENOISE_CBCR_ONLY;
10799 case 2:
10800 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10801 case 3:
10802 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10803 default:
10804 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10805 }
10806 }
10807
10808
10809 /*===========================================================================
10810 * FUNCTION : getTemporalDenoiseProcessPlate
10811 *
10812 * DESCRIPTION: query temporal denoise process plate
10813 *
10814 * PARAMETERS : None
10815 *
 * RETURN     : TNR process plate value
10817 *==========================================================================*/
getTemporalDenoiseProcessPlate()10818 cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
10819 {
10820 char prop[PROPERTY_VALUE_MAX];
10821 memset(prop, 0, sizeof(prop));
10822 property_get("persist.tnr.process.plates", prop, "0");
10823 int processPlate = atoi(prop);
10824 switch(processPlate) {
10825 case 0:
10826 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10827 case 1:
10828 return CAM_WAVELET_DENOISE_CBCR_ONLY;
10829 case 2:
10830 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10831 case 3:
10832 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10833 default:
10834 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10835 }
10836 }
10837
10838
10839 /*===========================================================================
10840 * FUNCTION : extractSceneMode
10841 *
10842 * DESCRIPTION: Extract scene mode from frameworks set metadata
10843 *
10844 * PARAMETERS :
10845 * @frame_settings: CameraMetadata reference
 *      @metaMode: ANDROID_CONTROL_MODE
10847 * @hal_metadata: hal metadata structure
10848 *
10849 * RETURN : None
10850 *==========================================================================*/
extractSceneMode(const CameraMetadata & frame_settings,uint8_t metaMode,metadata_buffer_t * hal_metadata)10851 int32_t QCamera3HardwareInterface::extractSceneMode(
10852 const CameraMetadata &frame_settings, uint8_t metaMode,
10853 metadata_buffer_t *hal_metadata)
10854 {
10855 int32_t rc = NO_ERROR;
10856
10857 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
10858 camera_metadata_ro_entry entry =
10859 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
10860 if (0 == entry.count)
10861 return rc;
10862
10863 uint8_t fwk_sceneMode = entry.data.u8[0];
10864
10865 int val = lookupHalName(SCENE_MODES_MAP,
10866 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
10867 fwk_sceneMode);
10868 if (NAME_NOT_FOUND != val) {
10869 uint8_t sceneMode = (uint8_t)val;
10870 LOGD("sceneMode: %d", sceneMode);
10871 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10872 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10873 rc = BAD_VALUE;
10874 }
10875 }
10876 } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
10877 (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
10878 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
10879 LOGD("sceneMode: %d", sceneMode);
10880 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10881 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10882 rc = BAD_VALUE;
10883 }
10884 }
10885 return rc;
10886 }
10887
10888 /*===========================================================================
10889 * FUNCTION : needRotationReprocess
10890 *
10891 * DESCRIPTION: if rotation needs to be done by reprocess in pp
10892 *
10893 * PARAMETERS : none
10894 *
10895 * RETURN : true: needed
10896 * false: no need
10897 *==========================================================================*/
needRotationReprocess()10898 bool QCamera3HardwareInterface::needRotationReprocess()
10899 {
10900 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
10901 // current rotation is not zero, and pp has the capability to process rotation
10902 LOGH("need do reprocess for rotation");
10903 return true;
10904 }
10905
10906 return false;
10907 }
10908
10909 /*===========================================================================
10910 * FUNCTION : needReprocess
10911 *
 * DESCRIPTION: if reprocess is needed
10913 *
10914 * PARAMETERS : none
10915 *
10916 * RETURN : true: needed
10917 * false: no need
10918 *==========================================================================*/
needReprocess(cam_feature_mask_t postprocess_mask)10919 bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
10920 {
10921 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
10922 // TODO: add for ZSL HDR later
10923 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
10924 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
10925 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
10926 return true;
10927 } else {
10928 LOGH("already post processed frame");
10929 return false;
10930 }
10931 }
10932 return needRotationReprocess();
10933 }
10934
10935 /*===========================================================================
10936 * FUNCTION : needJpegExifRotation
10937 *
10938 * DESCRIPTION: if rotation from jpeg is needed
10939 *
10940 * PARAMETERS : none
10941 *
10942 * RETURN : true: needed
10943 * false: no need
10944 *==========================================================================*/
needJpegExifRotation()10945 bool QCamera3HardwareInterface::needJpegExifRotation()
10946 {
10947 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
10948 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
10949 LOGD("Need use Jpeg EXIF Rotation");
10950 return true;
10951 }
10952 return false;
10953 }
10954
10955 /*===========================================================================
10956 * FUNCTION : addOfflineReprocChannel
10957 *
10958 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
10959 * coming from input channel
10960 *
10961 * PARAMETERS :
10962 * @config : reprocess configuration
10963 * @inputChHandle : pointer to the input (source) channel
10964 *
10965 *
10966 * RETURN : Ptr to the newly created channel obj. NULL if failed.
10967 *==========================================================================*/
/* Creates and initializes an offline reprocess channel fed by
 * inputChHandle, configures its PP feature mask, and wires up the
 * reprocess streams. Returns the new channel, or NULL on any failure
 * (the partially-built channel is deleted). Caller owns the result. */
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
    const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
            config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    // NOTE(review): with a throwing/aborting operator new this NULL check
    // is dead code; it only matters if new(std::nothrow) is in effect.
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Strip rotation from the mask when the PP block cannot rotate.
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
11015
11016 /*===========================================================================
11017 * FUNCTION : getMobicatMask
11018 *
11019 * DESCRIPTION: returns mobicat mask
11020 *
11021 * PARAMETERS : none
11022 *
11023 * RETURN : mobicat mask
11024 *
11025 *==========================================================================*/
/* Accessor for the Mobicat (tuning-metadata) mask; the mask itself is
 * set elsewhere in the HAL. */
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    return m_MobicatMask;
}
11030
11031 /*===========================================================================
11032 * FUNCTION : setMobicat
11033 *
11034 * DESCRIPTION: set Mobicat on/off.
11035 *
11036 * PARAMETERS :
11037 * @params : none
11038 *
11039 * RETURN : int32_t type of status
11040 * NO_ERROR -- success
11041 * none-zero failure code
11042 *==========================================================================*/
setMobicat()11043 int32_t QCamera3HardwareInterface::setMobicat()
11044 {
11045 int32_t ret = NO_ERROR;
11046
11047 if (m_MobicatMask) {
11048 tune_cmd_t tune_cmd;
11049 tune_cmd.type = SET_RELOAD_CHROMATIX;
11050 tune_cmd.module = MODULE_ALL;
11051 tune_cmd.value = TRUE;
11052 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
11053 CAM_INTF_PARM_SET_VFE_COMMAND,
11054 tune_cmd);
11055
11056 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
11057 CAM_INTF_PARM_SET_PP_COMMAND,
11058 tune_cmd);
11059 }
11060
11061 return ret;
11062 }
11063
11064 /*===========================================================================
11065 * FUNCTION : getLogLevel
11066 *
11067 * DESCRIPTION: Reads the log level property into a variable
11068 *
11069 * PARAMETERS :
11070 * None
11071 *
11072 * RETURN :
11073 * None
11074 *==========================================================================*/
getLogLevel()11075 void QCamera3HardwareInterface::getLogLevel()
11076 {
11077 char prop[PROPERTY_VALUE_MAX];
11078 uint32_t globalLogLevel = 0;
11079
11080 property_get("persist.camera.hal.debug", prop, "0");
11081 int val = atoi(prop);
11082 if (0 <= val) {
11083 gCamHal3LogLevel = (uint32_t)val;
11084 }
11085
11086 property_get("persist.camera.kpi.debug", prop, "1");
11087 gKpiDebugLevel = atoi(prop);
11088
11089 property_get("persist.camera.global.debug", prop, "0");
11090 val = atoi(prop);
11091 if (0 <= val) {
11092 globalLogLevel = (uint32_t)val;
11093 }
11094
11095 /* Highest log level among hal.logs and global.logs is selected */
11096 if (gCamHal3LogLevel < globalLogLevel)
11097 gCamHal3LogLevel = globalLogLevel;
11098
11099 return;
11100 }
11101
11102 /*===========================================================================
11103 * FUNCTION : validateStreamRotations
11104 *
11105 * DESCRIPTION: Check if the rotations requested are supported
11106 *
11107 * PARAMETERS :
11108 * @stream_list : streams to be configured
11109 *
11110 * RETURN : NO_ERROR on success
11111 * -EINVAL on failure
11112 *
11113 *==========================================================================*/
validateStreamRotations(camera3_stream_configuration_t * streamList)11114 int QCamera3HardwareInterface::validateStreamRotations(
11115 camera3_stream_configuration_t *streamList)
11116 {
11117 int rc = NO_ERROR;
11118
11119 /*
11120 * Loop through all streams requested in configuration
11121 * Check if unsupported rotations have been requested on any of them
11122 */
11123 for (size_t j = 0; j < streamList->num_streams; j++){
11124 camera3_stream_t *newStream = streamList->streams[j];
11125
11126 switch(newStream->rotation) {
11127 case CAMERA3_STREAM_ROTATION_0:
11128 case CAMERA3_STREAM_ROTATION_90:
11129 case CAMERA3_STREAM_ROTATION_180:
11130 case CAMERA3_STREAM_ROTATION_270:
11131 //Expected values
11132 break;
11133 default:
11134 LOGE("Error: Unsupported rotation of %d requested for stream"
11135 "type:%d and stream format:%d",
11136 newStream->rotation, newStream->stream_type,
11137 newStream->format);
11138 return -EINVAL;
11139 }
11140
11141 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
11142 bool isImplDef = (newStream->format ==
11143 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
11144 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
11145 isImplDef);
11146
11147 if (isRotated && (!isImplDef || isZsl)) {
11148 LOGE("Error: Unsupported rotation of %d requested for stream"
11149 "type:%d and stream format:%d",
11150 newStream->rotation, newStream->stream_type,
11151 newStream->format);
11152 rc = -EINVAL;
11153 break;
11154 }
11155 }
11156
11157 return rc;
11158 }
11159
11160 /*===========================================================================
11161 * FUNCTION : getFlashInfo
11162 *
11163 * DESCRIPTION: Retrieve information about whether the device has a flash.
11164 *
11165 * PARAMETERS :
11166 * @cameraId : Camera id to query
11167 * @hasFlash : Boolean indicating whether there is a flash device
11168 * associated with given camera
11169 * @flashNode : If a flash device exists, this will be its device node.
11170 *
11171 * RETURN :
11172 * None
11173 *==========================================================================*/
getFlashInfo(const int cameraId,bool & hasFlash,char (& flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])11174 void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
11175 bool& hasFlash,
11176 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
11177 {
11178 cam_capability_t* camCapability = gCamCapability[cameraId];
11179 if (NULL == camCapability) {
11180 hasFlash = false;
11181 flashNode[0] = '\0';
11182 } else {
11183 hasFlash = camCapability->flash_available;
11184 strlcpy(flashNode,
11185 (char*)camCapability->flash_dev_name,
11186 QCAMERA_MAX_FILEPATH_LENGTH);
11187 }
11188 }
11189
11190 /*===========================================================================
11191 * FUNCTION : getEepromVersionInfo
11192 *
11193 * DESCRIPTION: Retrieve version info of the sensor EEPROM data
11194 *
11195 * PARAMETERS : None
11196 *
11197 * RETURN : string describing EEPROM version
11198 * "\0" if no such info available
11199 *==========================================================================*/
/* Returns a pointer to the sensor EEPROM version string held in the
 * per-camera capability table; the storage is owned by gCamCapability,
 * not the caller. An empty string means no info is available. */
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
11204
11205 /*===========================================================================
11206 * FUNCTION : getLdafCalib
11207 *
11208 * DESCRIPTION: Retrieve Laser AF calibration data
11209 *
11210 * PARAMETERS : None
11211 *
11212 * RETURN : Two uint32_t describing laser AF calibration data
11213 * NULL if none is available.
11214 *==========================================================================*/
getLdafCalib()11215 const uint32_t *QCamera3HardwareInterface::getLdafCalib()
11216 {
11217 if (mLdafCalibExist) {
11218 return &mLdafCalib[0];
11219 } else {
11220 return NULL;
11221 }
11222 }
11223
11224 /*===========================================================================
11225 * FUNCTION : dynamicUpdateMetaStreamInfo
11226 *
11227 * DESCRIPTION: This function:
11228 * (1) stops all the channels
11229 * (2) returns error on pending requests and buffers
11230 * (3) sends metastream_info in setparams
11231 * (4) starts all channels
11232 * This is useful when sensor has to be restarted to apply any
11233 * settings such as frame rate from a different sensor mode
11234 *
11235 * PARAMETERS : None
11236 *
11237 * RETURN : NO_ERROR on success
11238 * Error codes on failure
11239 *
11240 *==========================================================================*/
/* Restarts the sensor pipeline so a new sensor mode (e.g. a different
 * frame rate) can take effect. The order is significant:
 *   (1) stop all channels, (2) error out all pending requests/buffers,
 *   (3) re-send meta stream info via set_parms, (4) restart channels.
 * Returns NO_ERROR, or the first failing step's error code. */
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    LOGD("E");

    rc = stopAllChannels();
    if (rc < 0) {
        LOGE("stopAllChannels failed");
        return rc;
    }

    // Frameworks must be told their in-flight requests are gone before
    // the pipeline is brought back up.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        return rc;
    }

    // NOTE(review): pp_mask is printed with %x; if cam_feature_mask_t is
    // 64-bit this truncates the printed value — confirm the format.
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
                "Format:%d",
                mStreamConfigInfo.type[i],
                mStreamConfigInfo.stream_sizes[i].width,
                mStreamConfigInfo.stream_sizes[i].height,
                mStreamConfigInfo.postprocess_mask[i],
                mStreamConfigInfo.format[i]);
    }

    /* Send meta stream info once again so that ISP can start */
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        // Non-fatal: continue and restart channels with the old mode.
        LOGE("set Metastreaminfo failed. Sensor mode does not change");
    }

    rc = startAllChannels();
    if (rc < 0) {
        LOGE("startAllChannels failed");
        return rc;
    }

    LOGD("X");
    return rc;
}
11288
11289 /*===========================================================================
11290 * FUNCTION : stopAllChannels
11291 *
11292 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
11293 *
11294 * PARAMETERS : None
11295 *
11296 * RETURN : NO_ERROR on success
11297 * Error codes on failure
11298 *
11299 *==========================================================================*/
stopAllChannels()11300 int32_t QCamera3HardwareInterface::stopAllChannels()
11301 {
11302 int32_t rc = NO_ERROR;
11303
11304 LOGD("Stopping all channels");
11305 // Stop the Streams/Channels
11306 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
11307 it != mStreamInfo.end(); it++) {
11308 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
11309 if (channel) {
11310 channel->stop();
11311 }
11312 (*it)->status = INVALID;
11313 }
11314
11315 if (mSupportChannel) {
11316 mSupportChannel->stop();
11317 }
11318 if (mAnalysisChannel) {
11319 mAnalysisChannel->stop();
11320 }
11321 if (mRawDumpChannel) {
11322 mRawDumpChannel->stop();
11323 }
11324 if (mMetadataChannel) {
11325 /* If content of mStreamInfo is not 0, there is metadata stream */
11326 mMetadataChannel->stop();
11327 }
11328
11329 LOGD("All channels stopped");
11330 return rc;
11331 }
11332
11333 /*===========================================================================
11334 * FUNCTION : startAllChannels
11335 *
11336 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
11337 *
11338 * PARAMETERS : None
11339 *
11340 * RETURN : NO_ERROR on success
11341 * Error codes on failure
11342 *
11343 *==========================================================================*/
startAllChannels()11344 int32_t QCamera3HardwareInterface::startAllChannels()
11345 {
11346 int32_t rc = NO_ERROR;
11347
11348 LOGD("Start all channels ");
11349 // Start the Streams/Channels
11350 if (mMetadataChannel) {
11351 /* If content of mStreamInfo is not 0, there is metadata stream */
11352 rc = mMetadataChannel->start();
11353 if (rc < 0) {
11354 LOGE("META channel start failed");
11355 return rc;
11356 }
11357 }
11358 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
11359 it != mStreamInfo.end(); it++) {
11360 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
11361 if (channel) {
11362 rc = channel->start();
11363 if (rc < 0) {
11364 LOGE("channel start failed");
11365 return rc;
11366 }
11367 }
11368 }
11369 if (mAnalysisChannel) {
11370 mAnalysisChannel->start();
11371 }
11372 if (mSupportChannel) {
11373 rc = mSupportChannel->start();
11374 if (rc < 0) {
11375 LOGE("Support channel start failed");
11376 return rc;
11377 }
11378 }
11379 if (mRawDumpChannel) {
11380 rc = mRawDumpChannel->start();
11381 if (rc < 0) {
11382 LOGE("RAW dump channel start failed");
11383 return rc;
11384 }
11385 }
11386
11387 LOGD("All channels started");
11388 return rc;
11389 }
11390
11391 /*===========================================================================
11392 * FUNCTION : notifyErrorForPendingRequests
11393 *
11394 * DESCRIPTION: This function sends error for all the pending requests/buffers
11395 *
11396 * PARAMETERS : None
11397 *
11398 * RETURN : Error codes
11399 * NO_ERROR on success
11400 *
11401 *==========================================================================*/
/* Sends error notifications to the framework for every pending request
 * and buffer, then clears all pending bookkeeping.
 *
 * Requests older than the oldest entry in mPendingRequestsList (their
 * metadata was already delivered) get per-buffer ERROR_BUFFER notifies;
 * everything else gets a single ERROR_REQUEST notify plus errored
 * buffers. Returns NO_ERROR, or NO_MEMORY if a buffer array cannot be
 * allocated.
 *
 * NOTE(review): assumes the caller holds whatever lock protects
 * mPendingBuffersMap/mPendingRequestsList — confirm at call sites. */
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
            frameNum);

    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                    req->frame_number, req->mPendingBufferList.size());

            // NOTE(review): NULL check after plain new is dead code unless
            // new(std::nothrow) is in effect.
            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                    sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            // One ERROR_BUFFER notify per buffer; each buffer is erased
            // from the pending list as it is packaged into the result.
            for (auto info = req->mPendingBufferList.begin();
                    info != req->mPendingBufferList.end(); ) {

                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                    req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            LOGE("Sending ERROR REQUEST for all pending requests");
            // Some frame might be missing in mPendingRequestsList.
            bool foundMatchingPendingReq = false;
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
            if (i != mPendingRequestsList.end() && i->frame_number == req->frame_number) {
                foundMatchingPendingReq = true;
            }

            LOGE("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            result.result = NULL;
            result.frame_number = req->frame_number;
            // Only attach the input buffer when a matching entry still
            // exists on mPendingRequestsList.
            result.input_buffer = foundMatchingPendingReq ? i->input_buffer : nullptr;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                    info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                    req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);
            delete [] pStream_Buf;
            if (foundMatchingPendingReq) {
                i = erasePendingRequest(i);
            }
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
11547
isOnEncoder(const cam_dimension_t max_viewfinder_size,uint32_t width,uint32_t height)11548 bool QCamera3HardwareInterface::isOnEncoder(
11549 const cam_dimension_t max_viewfinder_size,
11550 uint32_t width, uint32_t height)
11551 {
11552 return (width > (uint32_t)max_viewfinder_size.width ||
11553 height > (uint32_t)max_viewfinder_size.height);
11554 }
11555
11556 /*===========================================================================
11557 * FUNCTION : setBundleInfo
11558 *
11559 * DESCRIPTION: Set bundle info for all streams that are bundle.
11560 *
11561 * PARAMETERS : None
11562 *
11563 * RETURN : NO_ERROR on success
11564 * Error codes on failure
11565 *==========================================================================*/
setBundleInfo()11566 int32_t QCamera3HardwareInterface::setBundleInfo()
11567 {
11568 int32_t rc = NO_ERROR;
11569
11570 if (mChannelHandle) {
11571 cam_bundle_config_t bundleInfo;
11572 memset(&bundleInfo, 0, sizeof(bundleInfo));
11573 rc = mCameraHandle->ops->get_bundle_info(
11574 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
11575 if (rc != NO_ERROR) {
11576 LOGE("get_bundle_info failed");
11577 return rc;
11578 }
11579 if (mAnalysisChannel) {
11580 mAnalysisChannel->setBundleInfo(bundleInfo);
11581 }
11582 if (mSupportChannel) {
11583 mSupportChannel->setBundleInfo(bundleInfo);
11584 }
11585 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
11586 it != mStreamInfo.end(); it++) {
11587 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
11588 channel->setBundleInfo(bundleInfo);
11589 }
11590 if (mRawDumpChannel) {
11591 mRawDumpChannel->setBundleInfo(bundleInfo);
11592 }
11593 }
11594
11595 return rc;
11596 }
11597
11598 /*===========================================================================
11599 * FUNCTION : get_num_overall_buffers
11600 *
11601 * DESCRIPTION: Estimate number of pending buffers across all requests.
11602 *
11603 * PARAMETERS : None
11604 *
11605 * RETURN : Number of overall pending buffers
11606 *
11607 *==========================================================================*/
get_num_overall_buffers()11608 uint32_t PendingBuffersMap::get_num_overall_buffers()
11609 {
11610 uint32_t sum_buffers = 0;
11611 for (auto &req : mPendingBuffersInRequest) {
11612 sum_buffers += req.mPendingBufferList.size();
11613 }
11614 return sum_buffers;
11615 }
11616
/*===========================================================================
 * FUNCTION   : removeBuf
 *
 * DESCRIPTION: Remove a matching buffer from tracker. Scans every pending
 *              request's buffer list for the given handle; on the first
 *              match the buffer entry is erased, and if that leaves the
 *              request with no pending buffers the request itself is also
 *              dropped from the map. At most one entry is removed per call.
 *
 * PARAMETERS : @buffer: image buffer for the callback
 *
 * RETURN     : None
 *
 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                // erase() invalidates k; the immediate break below means the
                // returned iterator is never dereferenced.
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map. req is invalidated too,
                    // which is safe only because both loops break right after.
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        // Stop the outer scan once the buffer has been removed.
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
11653
11654 /*===========================================================================
11655 * FUNCTION : getBufErrStatus
11656 *
11657 * DESCRIPTION: get buffer error status
11658 *
11659 * PARAMETERS : @buffer: buffer handle
11660 *
11661 * RETURN : None
11662 *
11663 *==========================================================================*/
getBufErrStatus(buffer_handle_t * buffer)11664 int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
11665 {
11666 for (auto& req : mPendingBuffersInRequest) {
11667 for (auto& k : req.mPendingBufferList) {
11668 if (k.buffer == buffer)
11669 return k.bufStatus;
11670 }
11671 }
11672 return CAMERA3_BUFFER_STATUS_OK;
11673 }
11674
11675 /*===========================================================================
11676 * FUNCTION : setPAAFSupport
11677 *
11678 * DESCRIPTION: Set the preview-assisted auto focus support bit in
11679 * feature mask according to stream type and filter
11680 * arrangement
11681 *
11682 * PARAMETERS : @feature_mask: current feature mask, which may be modified
11683 * @stream_type: stream type
11684 * @filter_arrangement: filter arrangement
11685 *
11686 * RETURN : None
11687 *==========================================================================*/
setPAAFSupport(cam_feature_mask_t & feature_mask,cam_stream_type_t stream_type,cam_color_filter_arrangement_t filter_arrangement)11688 void QCamera3HardwareInterface::setPAAFSupport(
11689 cam_feature_mask_t& feature_mask,
11690 cam_stream_type_t stream_type,
11691 cam_color_filter_arrangement_t filter_arrangement)
11692 {
11693 switch (filter_arrangement) {
11694 case CAM_FILTER_ARRANGEMENT_RGGB:
11695 case CAM_FILTER_ARRANGEMENT_GRBG:
11696 case CAM_FILTER_ARRANGEMENT_GBRG:
11697 case CAM_FILTER_ARRANGEMENT_BGGR:
11698 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
11699 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
11700 feature_mask |= CAM_QCOM_FEATURE_PAAF;
11701 }
11702 break;
11703 case CAM_FILTER_ARRANGEMENT_Y:
11704 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
11705 feature_mask |= CAM_QCOM_FEATURE_PAAF;
11706 }
11707 break;
11708 default:
11709 break;
11710 }
11711 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
11712 feature_mask, stream_type, filter_arrangement);
11713
11714
11715 }
11716
11717 /*===========================================================================
11718 * FUNCTION : adjustBlackLevelForCFA
11719 *
11720 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
11721 * of bayer CFA (Color Filter Array).
11722 *
11723 * PARAMETERS : @input: black level pattern in the order of RGGB
11724 * @output: black level pattern in the order of CFA
11725 * @color_arrangement: CFA color arrangement
11726 *
11727 * RETURN : None
11728 *==========================================================================*/
11729 template<typename T>
adjustBlackLevelForCFA(T input[BLACK_LEVEL_PATTERN_CNT],T output[BLACK_LEVEL_PATTERN_CNT],cam_color_filter_arrangement_t color_arrangement)11730 void QCamera3HardwareInterface::adjustBlackLevelForCFA(
11731 T input[BLACK_LEVEL_PATTERN_CNT],
11732 T output[BLACK_LEVEL_PATTERN_CNT],
11733 cam_color_filter_arrangement_t color_arrangement)
11734 {
11735 switch (color_arrangement) {
11736 case CAM_FILTER_ARRANGEMENT_GRBG:
11737 output[0] = input[1];
11738 output[1] = input[0];
11739 output[2] = input[3];
11740 output[3] = input[2];
11741 break;
11742 case CAM_FILTER_ARRANGEMENT_GBRG:
11743 output[0] = input[2];
11744 output[1] = input[3];
11745 output[2] = input[0];
11746 output[3] = input[1];
11747 break;
11748 case CAM_FILTER_ARRANGEMENT_BGGR:
11749 output[0] = input[3];
11750 output[1] = input[2];
11751 output[2] = input[1];
11752 output[3] = input[0];
11753 break;
11754 case CAM_FILTER_ARRANGEMENT_RGGB:
11755 output[0] = input[0];
11756 output[1] = input[1];
11757 output[2] = input[2];
11758 output[3] = input[3];
11759 break;
11760 default:
11761 LOGE("Invalid color arrangement to derive dynamic blacklevel");
11762 break;
11763 }
11764 }
11765
11766 /*===========================================================================
11767 * FUNCTION : is60HzZone
11768 *
11769 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
11770 *
11771 * PARAMETERS : None
11772 *
11773 * RETURN : True if in 60Hz zone, False otherwise
11774 *==========================================================================*/
is60HzZone()11775 bool QCamera3HardwareInterface::is60HzZone()
11776 {
11777 time_t t = time(NULL);
11778 struct tm lt;
11779
11780 struct tm* r = localtime_r(&t, <);
11781
11782 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
11783 return true;
11784 else
11785 return false;
11786 }
11787 }; //end namespace qcamera
11788