/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "ExtCamDev"
// #define LOG_NDEBUG 0
#include <log/log.h>

#include "ExternalCameraDevice.h"

#include <aidl/android/hardware/camera/common/Status.h>
#include <convert.h>
#include <linux/videodev2.h>
#include <regex>
#include <set>

namespace android {
namespace hardware {
namespace camera {
namespace device {
namespace implementation {

using ::aidl::android::hardware::camera::common::Status;

namespace {
// Only support MJPEG for now as it seems to be the format that supports higher fps.
// Other formats to consider in the future:
// * V4L2_PIX_FMT_YVU420 (== YV12)
// * V4L2_PIX_FMT_YVYU (YVYU: can be converted to YV12 or other YUV420_888 formats)
const std::array<uint32_t, /*size*/ 2> kSupportedFourCCs{
        {V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_Z16}};  // double braces required in C++11

constexpr int MAX_RETRY = 5;                  // Allow retrying v4l2 open failures a few times.
constexpr int OPEN_RETRY_SLEEP_US = 100'000;  // 100ms * MAX_RETRY = 0.5 seconds

const std::regex kDevicePathRE("/dev/video([0-9]+)");
}  // namespace

std::string ExternalCameraDevice::kDeviceVersion = "1.1";

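// Matches the device path (e.g. /dev/video0) against kDevicePathRE and derives the numeric
// camera ID from it, offset by ExternalCameraConfig::cameraIdOffset.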
ExternalCameraDevice::ExternalCameraDevice(const std::string& devicePath,
                                           const ExternalCameraConfig& config)
    : mCameraId("-1"), mDevicePath(devicePath), mCfg(config) {
    std::smatch sm;
    if (std::regex_match(mDevicePath, sm, kDevicePathRE)) {
        mCameraId = std::to_string(mCfg.cameraIdOffset + std::stoi(sm[1]));
    } else {
        ALOGE("%s: device path match failed for %s", __FUNCTION__, mDevicePath.c_str());
    }
}

ExternalCameraDevice::~ExternalCameraDevice() {}

ndk::ScopedAStatus ExternalCameraDevice::getCameraCharacteristics(CameraMetadata* _aidl_return) {
    Mutex::Autolock _l(mLock);
    if (_aidl_return == nullptr) {
        return fromStatus(Status::ILLEGAL_ARGUMENT);
    }

    if (isInitFailedLocked()) {
        return fromStatus(Status::INTERNAL_ERROR);
    }

    const camera_metadata_t* rawMetadata = mCameraCharacteristics.getAndLock();
    convertToAidl(rawMetadata, _aidl_return);
    mCameraCharacteristics.unlock(rawMetadata);
    return fromStatus(Status::OK);
}

ndk::ScopedAStatus ExternalCameraDevice::getPhysicalCameraCharacteristics(const std::string&,
                                                                          CameraMetadata*) {
    ALOGE("%s: Physical camera functions are not supported for external cameras.", __FUNCTION__);
    return fromStatus(Status::ILLEGAL_ARGUMENT);
}

ndk::ScopedAStatus ExternalCameraDevice::getResourceCost(CameraResourceCost* _aidl_return) {
    if (_aidl_return == nullptr) {
        return fromStatus(Status::ILLEGAL_ARGUMENT);
    }

    _aidl_return->resourceCost = 100;
    return fromStatus(Status::OK);
}

ndk::ScopedAStatus ExternalCameraDevice::isStreamCombinationSupported(
        const StreamConfiguration& in_streams, bool* _aidl_return) {
    if (isInitFailed()) {
        ALOGE("%s: camera %s. camera init failed!", __FUNCTION__, mCameraId.c_str());
        return fromStatus(Status::INTERNAL_ERROR);
    }
    Status s = ExternalCameraDeviceSession::isStreamCombinationSupported(in_streams,
                                                                         mSupportedFormats, mCfg);
    *_aidl_return = s == Status::OK;
    return fromStatus(Status::OK);
}

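// Opens the underlying V4L2 device node (retrying transient open failures) and creates a new
// ExternalCameraDeviceSession. Fails if an open session already exists for this camera.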
ndk::ScopedAStatus ExternalCameraDevice::open(
        const std::shared_ptr<ICameraDeviceCallback>& in_callback,
        std::shared_ptr<ICameraDeviceSession>* _aidl_return) {
    if (_aidl_return == nullptr) {
        ALOGE("%s: cannot open camera %s. return session ptr is null!", __FUNCTION__,
              mCameraId.c_str());
        return fromStatus(Status::ILLEGAL_ARGUMENT);
    }

    Mutex::Autolock _l(mLock);
    if (isInitFailedLocked()) {
        ALOGE("%s: cannot open camera %s. camera init failed!", __FUNCTION__, mCameraId.c_str());
        return fromStatus(Status::INTERNAL_ERROR);
    }

    std::shared_ptr<ExternalCameraDeviceSession> session;
    ALOGV("%s: Initializing device for camera %s", __FUNCTION__, mCameraId.c_str());
    session = mSession.lock();

    if (session != nullptr && !session->isClosed()) {
        ALOGE("%s: cannot open an already opened camera!", __FUNCTION__);
        return fromStatus(Status::CAMERA_IN_USE);
    }

    int numAttempt = 0;
    unique_fd fd(::open(mDevicePath.c_str(), O_RDWR));
    while (fd.get() < 0 && numAttempt < MAX_RETRY) {
        // Previous attempt failed. Retry opening the device at most MAX_RETRY times.
        ALOGW("%s: v4l2 device %s open failed, wait 100ms and try again", __FUNCTION__,
              mDevicePath.c_str());
        usleep(OPEN_RETRY_SLEEP_US);  // sleep and try again
        fd.reset(::open(mDevicePath.c_str(), O_RDWR));
        numAttempt++;
    }

    if (fd.get() < 0) {
        ALOGE("%s: v4l2 device open %s failed: %s", __FUNCTION__, mDevicePath.c_str(),
              strerror(errno));
        return fromStatus(Status::INTERNAL_ERROR);
    }

    session = createSession(in_callback, mCfg, mSupportedFormats, mCroppingType,
                            mCameraCharacteristics, mCameraId, std::move(fd));
    if (session == nullptr) {
        ALOGE("%s: camera device session allocation failed", __FUNCTION__);
        return fromStatus(Status::INTERNAL_ERROR);
    }

    if (session->isInitFailed()) {
        ALOGE("%s: camera device session init failed", __FUNCTION__);
        return fromStatus(Status::INTERNAL_ERROR);
    }

    mSession = session;
    *_aidl_return = session;
    return fromStatus(Status::OK);
}

ndk::ScopedAStatus ExternalCameraDevice::openInjectionSession(
        const std::shared_ptr<ICameraDeviceCallback>&, std::shared_ptr<ICameraInjectionSession>*) {
    return fromStatus(Status::OPERATION_NOT_SUPPORTED);
}

ndk::ScopedAStatus ExternalCameraDevice::setTorchMode(bool) {
    return fromStatus(Status::OPERATION_NOT_SUPPORTED);
}

ndk::ScopedAStatus ExternalCameraDevice::turnOnTorchWithStrengthLevel(int32_t) {
    return fromStatus(Status::OPERATION_NOT_SUPPORTED);
}

ndk::ScopedAStatus ExternalCameraDevice::getTorchStrengthLevel(int32_t*) {
    return fromStatus(Status::OPERATION_NOT_SUPPORTED);
}

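// Creates an ExternalCameraDeviceSession bound to the opened V4L2 fd and the static camera
// characteristics.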
std::shared_ptr<ExternalCameraDeviceSession> ExternalCameraDevice::createSession(
        const std::shared_ptr<ICameraDeviceCallback>& cb, const ExternalCameraConfig& cfg,
        const std::vector<SupportedV4L2Format>& sortedFormats, const CroppingType& croppingType,
        const common::V1_0::helper::CameraMetadata& chars, const std::string& cameraId,
        unique_fd v4l2Fd) {
    return ndk::SharedRefBase::make<ExternalCameraDeviceSession>(
            cb, cfg, sortedFormats, croppingType, chars, cameraId, std::move(v4l2Fd));
}

bool ExternalCameraDevice::isInitFailed() {
    Mutex::Autolock _l(mLock);
    return isInitFailedLocked();
}

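// Lazily initializes the camera characteristics on first call and returns whether that
// initialization failed. Caller must hold mLock.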
bool ExternalCameraDevice::isInitFailedLocked() {
    if (!mInitialized) {
        status_t ret = initCameraCharacteristics();
        if (ret != OK) {
            ALOGE("%s: init camera characteristics failed: error %d", __FUNCTION__, ret);
            mInitFailed = true;
        }
        mInitialized = true;
    }
    return mInitFailed;
}

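// Probes the V4L2 device for supported formats assuming either horizontal or vertical
// cropping, then picks the cropping type that preserves the largest output size (and, on a
// tie, the one that supports more sizes).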
void ExternalCameraDevice::initSupportedFormatsLocked(int fd) {
    std::vector<SupportedV4L2Format> horizontalFmts =
            getCandidateSupportedFormatsLocked(fd, HORIZONTAL, mCfg.fpsLimits, mCfg.depthFpsLimits,
                                               mCfg.minStreamSize, mCfg.depthEnabled);
    std::vector<SupportedV4L2Format> verticalFmts =
            getCandidateSupportedFormatsLocked(fd, VERTICAL, mCfg.fpsLimits, mCfg.depthFpsLimits,
                                               mCfg.minStreamSize, mCfg.depthEnabled);

    size_t horiSize = horizontalFmts.size();
    size_t vertSize = verticalFmts.size();

    if (horiSize == 0 && vertSize == 0) {
        ALOGE("%s: cannot find suitable cropping type!", __FUNCTION__);
        return;
    }

    if (horiSize == 0) {
        mSupportedFormats = verticalFmts;
        mCroppingType = VERTICAL;
        return;
    } else if (vertSize == 0) {
        mSupportedFormats = horizontalFmts;
        mCroppingType = HORIZONTAL;
        return;
    }

    const auto& maxHoriSize = horizontalFmts[horizontalFmts.size() - 1];
    const auto& maxVertSize = verticalFmts[verticalFmts.size() - 1];

    // Try to keep the largest possible output size.
    // When they are the same or ambiguous, pick the one that supports more sizes.
    if (maxHoriSize.width == maxVertSize.width && maxHoriSize.height == maxVertSize.height) {
        if (horiSize > vertSize) {
            mSupportedFormats = horizontalFmts;
            mCroppingType = HORIZONTAL;
        } else {
            mSupportedFormats = verticalFmts;
            mCroppingType = VERTICAL;
        }
    } else if (maxHoriSize.width >= maxVertSize.width && maxHoriSize.height >= maxVertSize.height) {
        mSupportedFormats = horizontalFmts;
        mCroppingType = HORIZONTAL;
    } else if (maxHoriSize.width <= maxVertSize.width && maxHoriSize.height <= maxVertSize.height) {
        mSupportedFormats = verticalFmts;
        mCroppingType = VERTICAL;
    } else {
        if (horiSize > vertSize) {
            mSupportedFormats = horizontalFmts;
            mCroppingType = HORIZONTAL;
        } else {
            mSupportedFormats = verticalFmts;
            mCroppingType = VERTICAL;
        }
    }
}

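// Builds the static camera characteristics (default, control, output, and capability keys)
// from the V4L2 device. Returns OK immediately if they were already initialized.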
status_t ExternalCameraDevice::initCameraCharacteristics() {
    if (!mCameraCharacteristics.isEmpty()) {
        // Camera characteristics previously initialized. Skip.
        return OK;
    }

    // Init camera characteristics.
    unique_fd fd(::open(mDevicePath.c_str(), O_RDWR));
    if (fd.get() < 0) {
        ALOGE("%s: v4l2 device open %s failed", __FUNCTION__, mDevicePath.c_str());
        return DEAD_OBJECT;
    }

    status_t ret;
    ret = initDefaultCharsKeys(&mCameraCharacteristics);
    if (ret != OK) {
        ALOGE("%s: init default characteristics key failed: error %d", __FUNCTION__, ret);
        mCameraCharacteristics.clear();
        return ret;
    }

    ret = initCameraControlsCharsKeys(fd.get(), &mCameraCharacteristics);
    if (ret != OK) {
        ALOGE("%s: init camera control characteristics key failed: error %d", __FUNCTION__, ret);
        mCameraCharacteristics.clear();
        return ret;
    }

    ret = initOutputCharsKeys(fd.get(), &mCameraCharacteristics);
    if (ret != OK) {
        ALOGE("%s: init output characteristics key failed: error %d", __FUNCTION__, ret);
        mCameraCharacteristics.clear();
        return ret;
    }

    ret = initAvailableCapabilities(&mCameraCharacteristics);
    if (ret != OK) {
        ALOGE("%s: init available capabilities key failed: error %d", __FUNCTION__, ret);
        mCameraCharacteristics.clear();
        return ret;
    }

    return OK;
}

#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))
#define UPDATE(tag, data, size)                        \
    do {                                               \
        if (metadata->update((tag), (data), (size))) { \
            ALOGE("Update " #tag " failed!");          \
            return -EINVAL;                            \
        }                                              \
    } while (0)

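// Advertises the DEPTH_OUTPUT and/or BACKWARD_COMPATIBLE capabilities depending on whether
// the device exposes depth (Z16) and/or color (MJPEG) formats.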
status_t ExternalCameraDevice::initAvailableCapabilities(
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    if (mSupportedFormats.empty()) {
        ALOGE("%s: Supported formats list is empty", __FUNCTION__);
        return UNKNOWN_ERROR;
    }

    bool hasDepth = false;
    bool hasColor = false;
    for (const auto& fmt : mSupportedFormats) {
        switch (fmt.fourcc) {
            case V4L2_PIX_FMT_Z16:
                hasDepth = true;
                break;
            case V4L2_PIX_FMT_MJPEG:
                hasColor = true;
                break;
            default:
                ALOGW("%s: Unsupported format found", __FUNCTION__);
        }
    }

    std::vector<uint8_t> availableCapabilities;
    if (hasDepth) {
        availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
    }
    if (hasColor) {
        availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    }
    if (!availableCapabilities.empty()) {
        UPDATE(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, availableCapabilities.data(),
               availableCapabilities.size());
    }

    return OK;
}

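// Fills in static metadata keys that do not depend on the connected V4L2 device (hardware
// level, 3A modes, JPEG thumbnail sizes, available request/result keys, etc.).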
status_t ExternalCameraDevice::initDefaultCharsKeys(
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    const uint8_t hardware_level = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL;
    UPDATE(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &hardware_level, 1);

    // android.colorCorrection
    const uint8_t availableAberrationModes[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF};
    UPDATE(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, availableAberrationModes,
           ARRAY_SIZE(availableAberrationModes));

    // android.control
    const uint8_t antibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
    UPDATE(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, &antibandingMode, 1);

    const int32_t controlMaxRegions[] = {/*AE*/ 0, /*AWB*/ 0, /*AF*/ 0};
    UPDATE(ANDROID_CONTROL_MAX_REGIONS, controlMaxRegions, ARRAY_SIZE(controlMaxRegions));

    const uint8_t videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    UPDATE(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, &videoStabilizationMode, 1);

    const uint8_t awbAvailableMode = ANDROID_CONTROL_AWB_MODE_AUTO;
    UPDATE(ANDROID_CONTROL_AWB_AVAILABLE_MODES, &awbAvailableMode, 1);

    const uint8_t aeAvailableMode = ANDROID_CONTROL_AE_MODE_ON;
    UPDATE(ANDROID_CONTROL_AE_AVAILABLE_MODES, &aeAvailableMode, 1);

    const uint8_t availableEffect = ANDROID_CONTROL_EFFECT_MODE_OFF;
    UPDATE(ANDROID_CONTROL_AVAILABLE_EFFECTS, &availableEffect, 1);

    const uint8_t controlAvailableModes[] = {ANDROID_CONTROL_MODE_OFF, ANDROID_CONTROL_MODE_AUTO};
    UPDATE(ANDROID_CONTROL_AVAILABLE_MODES, controlAvailableModes,
           ARRAY_SIZE(controlAvailableModes));

    // android.edge
    const uint8_t edgeMode = ANDROID_EDGE_MODE_OFF;
    UPDATE(ANDROID_EDGE_AVAILABLE_EDGE_MODES, &edgeMode, 1);

    // android.flash
    const uint8_t flashInfo = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    UPDATE(ANDROID_FLASH_INFO_AVAILABLE, &flashInfo, 1);

    // android.hotPixel
    const uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_OFF;
    UPDATE(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES, &hotPixelMode, 1);

    // android.jpeg
    const int32_t jpegAvailableThumbnailSizes[] = {0,   0,   176, 144, 240, 144, 256,
                                                   144, 240, 160, 256, 154, 240, 180};
    UPDATE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegAvailableThumbnailSizes,
           ARRAY_SIZE(jpegAvailableThumbnailSizes));

    const int32_t jpegMaxSize = mCfg.maxJpegBufSize;
    UPDATE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);

    // android.lens
    const uint8_t focusDistanceCalibration =
            ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED;
    UPDATE(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, &focusDistanceCalibration, 1);

    const uint8_t opticalStabilizationMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    UPDATE(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, &opticalStabilizationMode, 1);

    const uint8_t facing = ANDROID_LENS_FACING_EXTERNAL;
    UPDATE(ANDROID_LENS_FACING, &facing, 1);

    // android.noiseReduction
    const uint8_t noiseReductionMode = ANDROID_NOISE_REDUCTION_MODE_OFF;
    UPDATE(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, &noiseReductionMode, 1);
    UPDATE(ANDROID_NOISE_REDUCTION_MODE, &noiseReductionMode, 1);

    const int32_t partialResultCount = 1;
    UPDATE(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &partialResultCount, 1);

    // This means pipeline latency of X frame intervals. The maximum number is 4.
    const uint8_t requestPipelineMaxDepth = 4;
    UPDATE(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &requestPipelineMaxDepth, 1);

    // Three numbers represent the maximum numbers of different types of output
    // streams simultaneously. The types are raw sensor, processed (but not
    // stalling), and processed (but stalling). For USB limited mode, raw sensor
    // is not supported. The stalling stream is JPEG. Non-stalling streams are
    // YUV_420_888 or YV12.
    const int32_t requestMaxNumOutputStreams[] = {
            /*RAW*/ 0,
            /*Processed*/ ExternalCameraDeviceSession::kMaxProcessedStream,
            /*Stall*/ ExternalCameraDeviceSession::kMaxStallStream};
    UPDATE(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, requestMaxNumOutputStreams,
           ARRAY_SIZE(requestMaxNumOutputStreams));

    // Limited mode doesn't support reprocessing.
    const int32_t requestMaxNumInputStreams = 0;
    UPDATE(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, &requestMaxNumInputStreams, 1);

    // android.scaler
    // TODO: b/72263447 V4L2_CID_ZOOM_*
    const float scalerAvailableMaxDigitalZoom[] = {1};
    UPDATE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, scalerAvailableMaxDigitalZoom,
           ARRAY_SIZE(scalerAvailableMaxDigitalZoom));

    const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
    UPDATE(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);

    const int32_t testPatternModes[] = {ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,
                                        ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR};
    UPDATE(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, testPatternModes,
           ARRAY_SIZE(testPatternModes));

    const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
    UPDATE(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);

    // Orientation is a bit odd for external cameras, but consider it as the orientation
    // between the external camera sensor (which is usually landscape) and the device's
    // natural display orientation. For devices with natural landscape display (ex: tablet/TV), the
    // orientation should be 0. For devices with natural portrait display (phone), the orientation
    // should be 270.
    const int32_t orientation = mCfg.orientation;
    UPDATE(ANDROID_SENSOR_ORIENTATION, &orientation, 1);

    // android.shading
    const uint8_t availableMode = ANDROID_SHADING_MODE_OFF;
    UPDATE(ANDROID_SHADING_AVAILABLE_MODES, &availableMode, 1);

    // android.statistics
    const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, &faceDetectMode, 1);

    const int32_t maxFaceCount = 0;
    UPDATE(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, &maxFaceCount, 1);

    const uint8_t availableHotpixelMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES, &availableHotpixelMode, 1);

    const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES, &lensShadingMapMode, 1);

    // android.sync
    const int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
    UPDATE(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);

    /* Other sensor/RAW related keys:
     * android.sensor.info.colorFilterArrangement -> no need if we don't do RAW
     * android.sensor.info.physicalSize -> not available
     * android.sensor.info.whiteLevel -> not available/not needed
     * android.sensor.info.lensShadingApplied -> not needed
     * android.sensor.info.preCorrectionActiveArraySize -> not available/not needed
     * android.sensor.blackLevelPattern -> not available/not needed
     */

    const int32_t availableRequestKeys[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
                                            ANDROID_CONTROL_AE_ANTIBANDING_MODE,
                                            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
                                            ANDROID_CONTROL_AE_LOCK,
                                            ANDROID_CONTROL_AE_MODE,
                                            ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                                            ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
                                            ANDROID_CONTROL_AF_MODE,
                                            ANDROID_CONTROL_AF_TRIGGER,
                                            ANDROID_CONTROL_AWB_LOCK,
                                            ANDROID_CONTROL_AWB_MODE,
                                            ANDROID_CONTROL_CAPTURE_INTENT,
                                            ANDROID_CONTROL_EFFECT_MODE,
                                            ANDROID_CONTROL_MODE,
                                            ANDROID_CONTROL_SCENE_MODE,
                                            ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
                                            ANDROID_FLASH_MODE,
                                            ANDROID_JPEG_ORIENTATION,
                                            ANDROID_JPEG_QUALITY,
                                            ANDROID_JPEG_THUMBNAIL_QUALITY,
                                            ANDROID_JPEG_THUMBNAIL_SIZE,
                                            ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
                                            ANDROID_NOISE_REDUCTION_MODE,
                                            ANDROID_SCALER_CROP_REGION,
                                            ANDROID_SENSOR_TEST_PATTERN_MODE,
                                            ANDROID_STATISTICS_FACE_DETECT_MODE,
                                            ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE};
    UPDATE(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys,
           ARRAY_SIZE(availableRequestKeys));

    const int32_t availableResultKeys[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
                                           ANDROID_CONTROL_AE_ANTIBANDING_MODE,
                                           ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
                                           ANDROID_CONTROL_AE_LOCK,
                                           ANDROID_CONTROL_AE_MODE,
                                           ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                                           ANDROID_CONTROL_AE_STATE,
                                           ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
                                           ANDROID_CONTROL_AF_MODE,
                                           ANDROID_CONTROL_AF_STATE,
                                           ANDROID_CONTROL_AF_TRIGGER,
                                           ANDROID_CONTROL_AWB_LOCK,
                                           ANDROID_CONTROL_AWB_MODE,
                                           ANDROID_CONTROL_AWB_STATE,
                                           ANDROID_CONTROL_CAPTURE_INTENT,
                                           ANDROID_CONTROL_EFFECT_MODE,
                                           ANDROID_CONTROL_MODE,
                                           ANDROID_CONTROL_SCENE_MODE,
                                           ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
                                           ANDROID_FLASH_MODE,
                                           ANDROID_FLASH_STATE,
                                           ANDROID_JPEG_ORIENTATION,
                                           ANDROID_JPEG_QUALITY,
                                           ANDROID_JPEG_THUMBNAIL_QUALITY,
                                           ANDROID_JPEG_THUMBNAIL_SIZE,
                                           ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
                                           ANDROID_NOISE_REDUCTION_MODE,
                                           ANDROID_REQUEST_PIPELINE_DEPTH,
                                           ANDROID_SCALER_CROP_REGION,
                                           ANDROID_SENSOR_TIMESTAMP,
                                           ANDROID_STATISTICS_FACE_DETECT_MODE,
                                           ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
                                           ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
                                           ANDROID_STATISTICS_SCENE_FLICKER};
    UPDATE(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys,
           ARRAY_SIZE(availableResultKeys));

    UPDATE(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, AVAILABLE_CHARACTERISTICS_KEYS.data(),
           AVAILABLE_CHARACTERISTICS_KEYS.size());

    return OK;
}

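// Control-related static keys. Most 3A controls are not backed by V4L2 controls yet (see the
// TODOs below), so conservative fixed values are reported.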
status_t ExternalCameraDevice::initCameraControlsCharsKeys(
        int, ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    // android.sensor.info.sensitivityRange -> V4L2_CID_ISO_SENSITIVITY
    // android.sensor.info.exposureTimeRange -> V4L2_CID_EXPOSURE_ABSOLUTE
    // android.sensor.info.maxFrameDuration -> TBD
    // android.lens.info.minimumFocusDistance -> V4L2_CID_FOCUS_ABSOLUTE
    // android.lens.info.hyperfocalDistance
    // android.lens.info.availableFocalLengths -> not available?

    // android.control
    // No AE compensation support for now.
    // TODO: V4L2_CID_EXPOSURE_BIAS
    const int32_t controlAeCompensationRange[] = {0, 0};
    UPDATE(ANDROID_CONTROL_AE_COMPENSATION_RANGE, controlAeCompensationRange,
           ARRAY_SIZE(controlAeCompensationRange));
    const camera_metadata_rational_t controlAeCompensationStep[] = {{0, 1}};
    UPDATE(ANDROID_CONTROL_AE_COMPENSATION_STEP, controlAeCompensationStep,
           ARRAY_SIZE(controlAeCompensationStep));

    // TODO: Check V4L2_CID_AUTO_FOCUS_*.
    const uint8_t afAvailableModes[] = {ANDROID_CONTROL_AF_MODE_AUTO, ANDROID_CONTROL_AF_MODE_OFF};
    UPDATE(ANDROID_CONTROL_AF_AVAILABLE_MODES, afAvailableModes, ARRAY_SIZE(afAvailableModes));

    // TODO: V4L2_CID_SCENE_MODE
    const uint8_t availableSceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED;
    UPDATE(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, &availableSceneMode, 1);

    // TODO: V4L2_CID_3A_LOCK
    const uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
    UPDATE(ANDROID_CONTROL_AE_LOCK_AVAILABLE, &aeLockAvailable, 1);
    const uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
    UPDATE(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, &awbLockAvailable, 1);

    // TODO: V4L2_CID_ZOOM_*
    const float scalerAvailableMaxDigitalZoom[] = {1};
    UPDATE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, scalerAvailableMaxDigitalZoom,
           ARRAY_SIZE(scalerAvailableMaxDigitalZoom));

    return OK;
}

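// Enumerates the supported V4L2 formats and publishes the corresponding stream
// configurations, frame durations, FPS ranges, and sensor array sizes.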
status_t ExternalCameraDevice::initOutputCharsKeys(
        int fd, ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    initSupportedFormatsLocked(fd);
    if (mSupportedFormats.empty()) {
        ALOGE("%s: Init supported format list failed", __FUNCTION__);
        return UNKNOWN_ERROR;
    }

    bool hasDepth = false;
    bool hasColor = false;

    // For V4L2_PIX_FMT_Z16
    std::array<int, /*size*/ 1> halDepthFormats{{HAL_PIXEL_FORMAT_Y16}};
    // For V4L2_PIX_FMT_MJPEG
    std::array<int, /*size*/ 3> halFormats{{HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
                                            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}};

    for (const auto& supportedFormat : mSupportedFormats) {
        switch (supportedFormat.fourcc) {
            case V4L2_PIX_FMT_Z16:
                hasDepth = true;
                break;
            case V4L2_PIX_FMT_MJPEG:
                hasColor = true;
                break;
            default:
                ALOGW("%s: format %c%c%c%c is not supported!", __FUNCTION__,
                      supportedFormat.fourcc & 0xFF, (supportedFormat.fourcc >> 8) & 0xFF,
                      (supportedFormat.fourcc >> 16) & 0xFF, (supportedFormat.fourcc >> 24) & 0xFF);
        }
    }

    if (hasDepth) {
        status_t ret = initOutputCharsKeysByFormat(
                metadata, V4L2_PIX_FMT_Z16, halDepthFormats,
                ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
                ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
                ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
                ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
        if (ret != OK) {
            ALOGE("%s: Unable to initialize depth format keys: %s", __FUNCTION__,
                  statusToString(ret).c_str());
            return ret;
        }
    }
    if (hasColor) {
        status_t ret =
                initOutputCharsKeysByFormat(metadata, V4L2_PIX_FMT_MJPEG, halFormats,
                                            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
                                            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                                            ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                                            ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
        if (ret != OK) {
            ALOGE("%s: Unable to initialize color format keys: %s", __FUNCTION__,
                  statusToString(ret).c_str());
            return ret;
        }
    }

    status_t ret = calculateMinFps(metadata);
    if (ret != OK) {
        ALOGE("%s: Unable to update fps metadata: %s", __FUNCTION__, statusToString(ret).c_str());
        return ret;
    }

    SupportedV4L2Format maximumFormat{.width = 0, .height = 0};
    for (const auto& supportedFormat : mSupportedFormats) {
        if (supportedFormat.width >= maximumFormat.width &&
            supportedFormat.height >= maximumFormat.height) {
            maximumFormat = supportedFormat;
        }
    }
    int32_t activeArraySize[] = {0, 0, static_cast<int32_t>(maximumFormat.width),
                                 static_cast<int32_t>(maximumFormat.height)};
    UPDATE(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, activeArraySize,
           ARRAY_SIZE(activeArraySize));
    UPDATE(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArraySize, ARRAY_SIZE(activeArraySize));

    int32_t pixelArraySize[] = {static_cast<int32_t>(maximumFormat.width),
                                static_cast<int32_t>(maximumFormat.height)};
    UPDATE(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, pixelArraySize, ARRAY_SIZE(pixelArraySize));
    return OK;
}

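// Publishes stream configuration, minimum frame duration, and stall duration entries for
// every supported size of the given V4L2 fourcc, mapped to each of the given HAL pixel
// formats. Each metadata entry is a four-element tuple: (format, width, height, direction tag
// or duration).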
template <size_t SIZE>
status_t ExternalCameraDevice::initOutputCharsKeysByFormat(
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata,
        uint32_t fourcc, const std::array<int, SIZE>& halFormats, int streamConfigTag,
        int streamConfigurationKey, int minFrameDurationKey, int stallDurationKey) {
    if (mSupportedFormats.empty()) {
        ALOGE("%s: Init supported format list failed", __FUNCTION__);
        return UNKNOWN_ERROR;
    }

    std::vector<int32_t> streamConfigurations;
    std::vector<int64_t> minFrameDurations;
    std::vector<int64_t> stallDurations;

    for (const auto& supportedFormat : mSupportedFormats) {
        if (supportedFormat.fourcc != fourcc) {
            // Skip 4CCs not meant for the halFormats
            continue;
        }
        for (const auto& format : halFormats) {
            streamConfigurations.push_back(format);
            streamConfigurations.push_back(supportedFormat.width);
            streamConfigurations.push_back(supportedFormat.height);
            streamConfigurations.push_back(streamConfigTag);
        }

        int64_t minFrameDuration = std::numeric_limits<int64_t>::max();
        for (const auto& fr : supportedFormat.frameRates) {
            // 1000000000LL < (2^32 - 1) and
            // fr.durationNumerator is uint32_t, so no overflow here
            int64_t frameDuration = 1000000000LL * fr.durationNumerator / fr.durationDenominator;
            if (frameDuration < minFrameDuration) {
                minFrameDuration = frameDuration;
            }
        }

        for (const auto& format : halFormats) {
            minFrameDurations.push_back(format);
            minFrameDurations.push_back(supportedFormat.width);
            minFrameDurations.push_back(supportedFormat.height);
            minFrameDurations.push_back(minFrameDuration);
        }

        // The stall duration is 0 for non-JPEG formats. For the JPEG format, the stall
        // duration can be 0 if the JPEG is small. Here we choose 1 sec for JPEG.
        // TODO: b/72261675. Maybe set this dynamically
        for (const auto& format : halFormats) {
            const int64_t NS_TO_SECOND = 1E9;
            int64_t stall_duration = (format == HAL_PIXEL_FORMAT_BLOB) ? NS_TO_SECOND : 0;
            stallDurations.push_back(format);
            stallDurations.push_back(supportedFormat.width);
            stallDurations.push_back(supportedFormat.height);
            stallDurations.push_back(stall_duration);
        }
    }

    UPDATE(streamConfigurationKey, streamConfigurations.data(), streamConfigurations.size());

    UPDATE(minFrameDurationKey, minFrameDurations.data(), minFrameDurations.size());

    UPDATE(stallDurationKey, stallDurations.data(), stallDurations.size());

    return OK;
}

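// Derives ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES and the maximum frame duration from
// the frame rates enumerated across all supported formats.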
status_t ExternalCameraDevice::calculateMinFps(
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) {
    std::set<int32_t> framerates;
    int32_t minFps = std::numeric_limits<int32_t>::max();

    for (const auto& supportedFormat : mSupportedFormats) {
        for (const auto& fr : supportedFormat.frameRates) {
            int32_t frameRateInt = static_cast<int32_t>(fr.getFramesPerSecond());
            if (minFps > frameRateInt) {
                minFps = frameRateInt;
            }
            framerates.insert(frameRateInt);
        }
    }

    std::vector<int32_t> fpsRanges;
    // FPS ranges
    for (const auto& framerate : framerates) {
        // Empirical: webcams often have close to 2x fps error and cannot support fixed fps range
        fpsRanges.push_back(framerate / 2);
        fpsRanges.push_back(framerate);
    }
    minFps /= 2;
    int64_t maxFrameDuration = 1000000000LL / minFps;

    UPDATE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, fpsRanges.data(), fpsRanges.size());

    UPDATE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &maxFrameDuration, 1);

    return OK;
}

#undef ARRAY_SIZE
#undef UPDATE

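// Enumerates the discrete frame intervals the device supports for the given format and size,
// and records the ones at or below fpsUpperBound.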
void ExternalCameraDevice::getFrameRateList(int fd, double fpsUpperBound,
                                            SupportedV4L2Format* format) {
    format->frameRates.clear();

    v4l2_frmivalenum frameInterval{
            .index = 0,
            .pixel_format = format->fourcc,
            .width = static_cast<__u32>(format->width),
            .height = static_cast<__u32>(format->height),
    };

    for (frameInterval.index = 0;
         TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frameInterval)) == 0;
         ++frameInterval.index) {
        if (frameInterval.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
            if (frameInterval.discrete.numerator != 0) {
                SupportedV4L2Format::FrameRate fr = {frameInterval.discrete.numerator,
                                                     frameInterval.discrete.denominator};
                double framerate = fr.getFramesPerSecond();
                if (framerate > fpsUpperBound) {
                    continue;
                }
                ALOGV("index:%d, format:%c%c%c%c, w %d, h %d, framerate %f", frameInterval.index,
                      frameInterval.pixel_format & 0xFF, (frameInterval.pixel_format >> 8) & 0xFF,
                      (frameInterval.pixel_format >> 16) & 0xFF,
                      (frameInterval.pixel_format >> 24) & 0xFF, frameInterval.width,
                      frameInterval.height, framerate);
                format->frameRates.push_back(fr);
            }
        }
    }

    if (format->frameRates.empty()) {
        ALOGE("%s: failed to get supported frame rates for format:%c%c%c%c w %d h %d", __FUNCTION__,
              frameInterval.pixel_format & 0xFF, (frameInterval.pixel_format >> 8) & 0xFF,
              (frameInterval.pixel_format >> 16) & 0xFF, (frameInterval.pixel_format >> 24) & 0xFF,
              frameInterval.width, frameInterval.height);
    }
}

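// Looks up the configured fps upper bound for this size (per cropping type) and appends the
// format to outFmts if it supports at least one frame rate within that bound.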
void ExternalCameraDevice::updateFpsBounds(
        int fd, CroppingType cropType,
        const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
        SupportedV4L2Format format, std::vector<SupportedV4L2Format>& outFmts) {
    double fpsUpperBound = -1.0;
    for (const auto& limit : fpsLimits) {
        if (cropType == VERTICAL) {
            if (format.width <= limit.size.width) {
                fpsUpperBound = limit.fpsUpperBound;
                break;
            }
        } else {  // HORIZONTAL
            if (format.height <= limit.size.height) {
                fpsUpperBound = limit.fpsUpperBound;
                break;
            }
        }
    }
    if (fpsUpperBound < 0.f) {
        return;
    }

    getFrameRateList(fd, fpsUpperBound, &format);
    if (!format.frameRates.empty()) {
        outFmts.push_back(format);
    }
}

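// Enumerates all non-emulated V4L2 formats in kSupportedFourCCs with their discrete frame
// sizes, filters out sizes that are taller than wide or smaller than minStreamSize, attaches
// frame rate info, and trims the list to sizes croppable from the largest one.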
std::vector<SupportedV4L2Format> ExternalCameraDevice::getCandidateSupportedFormatsLocked(
        int fd, CroppingType cropType,
        const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
        const std::vector<ExternalCameraConfig::FpsLimitation>& depthFpsLimits,
        const Size& minStreamSize, bool depthEnabled) {
    std::vector<SupportedV4L2Format> outFmts;
    struct v4l2_fmtdesc fmtdesc {
        .index = 0, .type = V4L2_BUF_TYPE_VIDEO_CAPTURE
    };
    int ret = 0;
    while (ret == 0) {
        ret = TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc));
        ALOGV("index:%d,ret:%d, format:%c%c%c%c", fmtdesc.index, ret, fmtdesc.pixelformat & 0xFF,
              (fmtdesc.pixelformat >> 8) & 0xFF, (fmtdesc.pixelformat >> 16) & 0xFF,
              (fmtdesc.pixelformat >> 24) & 0xFF);

        if (ret != 0 || (fmtdesc.flags & V4L2_FMT_FLAG_EMULATED)) {
            // Skip if the ioctl failed, or if the format is emulated
            fmtdesc.index++;
            continue;
        }
        auto it =
                std::find(kSupportedFourCCs.begin(), kSupportedFourCCs.end(), fmtdesc.pixelformat);
        if (it == kSupportedFourCCs.end()) {
            fmtdesc.index++;
            continue;
        }

        // Found supported format
        v4l2_frmsizeenum frameSize{.index = 0, .pixel_format = fmtdesc.pixelformat};
        for (; TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frameSize)) == 0;
             ++frameSize.index) {
            if (frameSize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
                ALOGV("index:%d, format:%c%c%c%c, w %d, h %d", frameSize.index,
                      fmtdesc.pixelformat & 0xFF, (fmtdesc.pixelformat >> 8) & 0xFF,
                      (fmtdesc.pixelformat >> 16) & 0xFF, (fmtdesc.pixelformat >> 24) & 0xFF,
                      frameSize.discrete.width, frameSize.discrete.height);

                // Disregard h > w formats so that all aspect ratios (h/w) are <= 1.0.
                // This will simplify the crop/scaling logic down the road.
                if (frameSize.discrete.height > frameSize.discrete.width) {
                    continue;
                }

                // Discard all formats that are smaller than minStreamSize
                if (frameSize.discrete.width < minStreamSize.width ||
                    frameSize.discrete.height < minStreamSize.height) {
                    continue;
                }

                SupportedV4L2Format format{
                        .width = static_cast<int32_t>(frameSize.discrete.width),
                        .height = static_cast<int32_t>(frameSize.discrete.height),
                        .fourcc = fmtdesc.pixelformat};

                if (format.fourcc == V4L2_PIX_FMT_Z16 && depthEnabled) {
                    updateFpsBounds(fd, cropType, depthFpsLimits, format, outFmts);
                } else {
                    updateFpsBounds(fd, cropType, fpsLimits, format, outFmts);
                }
            }
        }
        fmtdesc.index++;
    }
    trimSupportedFormats(cropType, &outFmts);
    return outFmts;
}

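// Sorts the format list by size and removes entries whose aspect ratio cannot be reached by
// cropping (vertically or horizontally, per cropType) from the largest supported size.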
void ExternalCameraDevice::trimSupportedFormats(CroppingType cropType,
                                                std::vector<SupportedV4L2Format>* pFmts) {
    std::vector<SupportedV4L2Format>& sortedFmts = *pFmts;
    if (cropType == VERTICAL) {
        std::sort(sortedFmts.begin(), sortedFmts.end(),
                  [](const SupportedV4L2Format& a, const SupportedV4L2Format& b) -> bool {
                      if (a.width == b.width) {
                          return a.height < b.height;
                      }
                      return a.width < b.width;
                  });
    } else {
        std::sort(sortedFmts.begin(), sortedFmts.end(),
                  [](const SupportedV4L2Format& a, const SupportedV4L2Format& b) -> bool {
                      if (a.height == b.height) {
                          return a.width < b.width;
                      }
                      return a.height < b.height;
                  });
    }

    if (sortedFmts.empty()) {
        ALOGE("%s: input format list is empty!", __FUNCTION__);
        return;
    }

    const auto& maxSize = sortedFmts[sortedFmts.size() - 1];
    float maxSizeAr = ASPECT_RATIO(maxSize);

    // Remove formats whose aspect ratio cannot be cropped from the largest size
    std::vector<SupportedV4L2Format> out;
    for (const auto& fmt : sortedFmts) {
        float ar = ASPECT_RATIO(fmt);
        if (isAspectRatioClose(ar, maxSizeAr)) {
            out.push_back(fmt);
        } else if (cropType == HORIZONTAL && ar < maxSizeAr) {
            out.push_back(fmt);
        } else if (cropType == VERTICAL && ar > maxSizeAr) {
            out.push_back(fmt);
        } else {
            ALOGV("%s: size (%d,%d) is removed because it cannot be cropped %s from (%d,%d)",
                  __FUNCTION__, fmt.width, fmt.height,
                  cropType == VERTICAL ? "vertically" : "horizontally", maxSize.width,
                  maxSize.height);
        }
    }
    sortedFmts = out;
}

binder_status_t ExternalCameraDevice::dump(int fd, const char** args, uint32_t numArgs) {
    std::shared_ptr<ExternalCameraDeviceSession> session = mSession.lock();
    if (session == nullptr) {
        dprintf(fd, "No active camera device session instance\n");
        return STATUS_OK;
    }

    return session->dump(fd, args, numArgs);
}

}  // namespace implementation
}  // namespace device
}  // namespace camera
}  // namespace hardware
}  // namespace android