1 /*
2 * Copyright (C) 2020 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include <cutils/properties.h>
18
19 #include "SessionConfigurationUtils.h"
20 #include <android/data_space.h>
21 #include <camera/StringUtils.h>
22 #include <gui/Flags.h> // remove with WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
23 #include <ui/PublicFormat.h>
24 #include "../CameraService.h"
25 #include "../api2/DepthCompositeStream.h"
26 #include "../api2/HeicCompositeStream.h"
27 #include "SessionConfigurationUtils.h"
28 #include "aidl/android/hardware/graphics/common/Dataspace.h"
29 #include "api2/JpegRCompositeStream.h"
30 #include "binder/Status.h"
31 #include "common/CameraDeviceBase.h"
32 #include "common/HalConversionsTemplated.h"
33 #include "device3/Camera3OutputStream.h"
34 #include "device3/ZoomRatioMapper.h"
35 #include "device3/aidl/AidlCamera3Device.h"
36 #include "device3/hidl/HidlCamera3Device.h"
37 #include "system/graphics-base-v1.1.h"
38
39 using android::camera3::OutputStreamInfo;
40 using android::camera3::OutputStreamInfo;
41 using android::hardware::camera2::ICameraDeviceUser;
42 using aidl::android::hardware::camera::device::RequestTemplate;
43
44 namespace android {
45 namespace camera3 {
46 namespace flags = com::android::internal::camera::flags;
47
getStreamConfigurations(const CameraMetadata & staticInfo,int configuration,std::unordered_map<int,std::vector<StreamConfiguration>> * scm)48 void StreamConfiguration::getStreamConfigurations(
49 const CameraMetadata &staticInfo, int configuration,
50 std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
51 if (scm == nullptr) {
52 ALOGE("%s: StreamConfigurationMap nullptr", __FUNCTION__);
53 return;
54 }
55 const int STREAM_FORMAT_OFFSET = 0;
56 const int STREAM_WIDTH_OFFSET = 1;
57 const int STREAM_HEIGHT_OFFSET = 2;
58 const int STREAM_IS_INPUT_OFFSET = 3;
59
60 camera_metadata_ro_entry availableStreamConfigs = staticInfo.find(configuration);
61 for (size_t i = 0; i < availableStreamConfigs.count; i += 4) {
62 int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
63 int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
64 int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
65 int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
66 StreamConfiguration sc = {format, width, height, isInput};
67 (*scm)[format].push_back(sc);
68 }
69 }
70
getStreamConfigurations(const CameraMetadata & staticInfo,bool maxRes,std::unordered_map<int,std::vector<StreamConfiguration>> * scm)71 void StreamConfiguration::getStreamConfigurations(
72 const CameraMetadata &staticInfo, bool maxRes,
73 std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
74 int32_t scalerKey =
75 SessionConfigurationUtils::getAppropriateModeTag(
76 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxRes);
77
78 int32_t depthKey =
79 SessionConfigurationUtils::getAppropriateModeTag(
80 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxRes);
81
82 int32_t dynamicDepthKey =
83 SessionConfigurationUtils::getAppropriateModeTag(
84 ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, maxRes);
85
86 int32_t heicKey =
87 SessionConfigurationUtils::getAppropriateModeTag(
88 ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxRes);
89
90 getStreamConfigurations(staticInfo, scalerKey, scm);
91 getStreamConfigurations(staticInfo, depthKey, scm);
92 getStreamConfigurations(staticInfo, dynamicDepthKey, scm);
93 getStreamConfigurations(staticInfo, heicKey, scm);
94 }
95
96 namespace SessionConfigurationUtils {
97
// Media performance class declared by the device, read once from the odm
// build property at static-initialization time (0 when the property is unset).
int32_t PERF_CLASS_LEVEL =
    property_get_int32("ro.odm.build.media_performance_class", 0);

// True when the device declares media performance class S or newer.
bool IS_PERF_CLASS = (PERF_CLASS_LEVEL >= SDK_VERSION_S);
102
getMaxJpegResolution(const CameraMetadata & metadata,bool ultraHighResolution)103 camera3::Size getMaxJpegResolution(const CameraMetadata &metadata,
104 bool ultraHighResolution) {
105 int32_t maxJpegWidth = 0, maxJpegHeight = 0;
106 const int STREAM_CONFIGURATION_SIZE = 4;
107 const int STREAM_FORMAT_OFFSET = 0;
108 const int STREAM_WIDTH_OFFSET = 1;
109 const int STREAM_HEIGHT_OFFSET = 2;
110 const int STREAM_IS_INPUT_OFFSET = 3;
111
112 int32_t scalerSizesTag = ultraHighResolution ?
113 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION :
114 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
115 camera_metadata_ro_entry_t availableStreamConfigs =
116 metadata.find(scalerSizesTag);
117 if (availableStreamConfigs.count == 0 ||
118 availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
119 return camera3::Size(0, 0);
120 }
121
122 // Get max jpeg size (area-wise).
123 for (size_t i= 0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
124 int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
125 int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
126 int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
127 int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
128 if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
129 && format == HAL_PIXEL_FORMAT_BLOB &&
130 (width * height > maxJpegWidth * maxJpegHeight)) {
131 maxJpegWidth = width;
132 maxJpegHeight = height;
133 }
134 }
135
136 return camera3::Size(maxJpegWidth, maxJpegHeight);
137 }
138
getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,camera3::Size defaultMaxJpegSize,size_t defaultMaxJpegBufferSize)139 size_t getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
140 camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize) {
141 return ((float)(uhrMaxJpegSize.width * uhrMaxJpegSize.height)) /
142 (defaultMaxJpegSize.width * defaultMaxJpegSize.height) * defaultMaxJpegBufferSize;
143 }
144
145 StreamConfigurationPair
getStreamConfigurationPair(const CameraMetadata & staticInfo)146 getStreamConfigurationPair(const CameraMetadata &staticInfo) {
147 camera3::StreamConfigurationPair streamConfigurationPair;
148 camera3::StreamConfiguration::getStreamConfigurations(staticInfo, false,
149 &streamConfigurationPair.mDefaultStreamConfigurationMap);
150 camera3::StreamConfiguration::getStreamConfigurations(staticInfo, true,
151 &streamConfigurationPair.mMaximumResolutionStreamConfigurationMap);
152 return streamConfigurationPair;
153 }
154
/**
 * Squared euclidean distance between (x0, y0) and (x1, y1), computed in
 * 64-bit arithmetic.
 *
 * The operands are widened to int64_t *before* subtracting: the previous
 * 32-bit subtraction overflowed (undefined behavior) when the coordinates had
 * opposite signs and large magnitude.
 */
int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
    int64_t d0 = static_cast<int64_t>(x0) - x1;
    int64_t d1 = static_cast<int64_t>(y0) - y1;
    return d0 * d0 + d1 * d1;
}
160
roundBufferDimensionNearest(int32_t width,int32_t height,int32_t format,android_dataspace dataSpace,const CameraMetadata & info,bool maxResolution,int32_t * outWidth,int32_t * outHeight)161 bool roundBufferDimensionNearest(int32_t width, int32_t height,
162 int32_t format, android_dataspace dataSpace,
163 const CameraMetadata& info, bool maxResolution, /*out*/int32_t* outWidth,
164 /*out*/int32_t* outHeight) {
165 const int32_t depthSizesTag =
166 getAppropriateModeTag(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
167 maxResolution);
168 const int32_t scalerSizesTag =
169 getAppropriateModeTag(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
170 const int32_t heicSizesTag =
171 getAppropriateModeTag(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
172 const int32_t jpegRSizesTag = getAppropriateModeTag(
173 ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, maxResolution);
174 const int32_t heicUltraHDRSizesTag = getAppropriateModeTag(
175 ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS, maxResolution);
176
177 bool isJpegRDataSpace = (dataSpace == static_cast<android_dataspace_t>(
178 ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R));
179 bool isHeicUltraHDRDataSpace = (dataSpace == static_cast<android_dataspace_t>(
180 ::aidl::android::hardware::graphics::common::Dataspace::HEIF_ULTRAHDR));
181 camera_metadata_ro_entry streamConfigs =
182 (isJpegRDataSpace) ? info.find(jpegRSizesTag) :
183 (isHeicUltraHDRDataSpace) ? info.find(heicUltraHDRSizesTag) :
184 (dataSpace == HAL_DATASPACE_DEPTH) ? info.find(depthSizesTag) :
185 (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
186 info.find(heicSizesTag) :
187 info.find(scalerSizesTag);
188
189 int32_t bestWidth = -1;
190 int32_t bestHeight = -1;
191
192 // Iterate through listed stream configurations and find the one with the smallest euclidean
193 // distance from the given dimensions for the given format.
194 for (size_t i = 0; i < streamConfigs.count; i += 4) {
195 int32_t fmt = streamConfigs.data.i32[i];
196 int32_t w = streamConfigs.data.i32[i + 1];
197 int32_t h = streamConfigs.data.i32[i + 2];
198
199 // Ignore input/output type for now
200 if (fmt == format) {
201 if (w == width && h == height) {
202 bestWidth = width;
203 bestHeight = height;
204 break;
205 } else if (w <= ROUNDING_WIDTH_CAP && (bestWidth == -1 ||
206 SessionConfigurationUtils::euclidDistSquare(w, h, width, height) <
207 SessionConfigurationUtils::euclidDistSquare(bestWidth, bestHeight, width,
208 height))) {
209 bestWidth = w;
210 bestHeight = h;
211 }
212 }
213 }
214
215 if (bestWidth == -1) {
216 // Return false if no configurations for this format were listed
217 ALOGE("%s: No configurations for format %d width %d, height %d, maxResolution ? %s",
218 __FUNCTION__, format, width, height, maxResolution ? "true" : "false");
219 return false;
220 }
221
222 // Set the outputs to the closet width/height
223 if (outWidth != NULL) {
224 *outWidth = bestWidth;
225 }
226 if (outHeight != NULL) {
227 *outHeight = bestHeight;
228 }
229
230 // Return true if at least one configuration for this format was listed
231 return true;
232 }
233
234 //check if format is 10-bit compatible
is10bitCompatibleFormat(int32_t format,android_dataspace_t dataSpace)235 bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace) {
236 switch(format) {
237 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
238 case HAL_PIXEL_FORMAT_YCBCR_P010:
239 return true;
240 case HAL_PIXEL_FORMAT_BLOB:
241 if (dataSpace == static_cast<android_dataspace_t>(
242 ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
243 return true;
244 } else if (dataSpace == static_cast<android_dataspace_t>(
245 ::aidl::android::hardware::graphics::common::Dataspace::HEIF_ULTRAHDR)) {
246 return true;
247 }
248
249 return false;
250 default:
251 return false;
252 }
253 }
254
isDynamicRangeProfileSupported(int64_t dynamicRangeProfile,const CameraMetadata & staticInfo)255 bool isDynamicRangeProfileSupported(int64_t dynamicRangeProfile, const CameraMetadata& staticInfo) {
256 if (dynamicRangeProfile == ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
257 // Supported by default
258 return true;
259 }
260
261 camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
262 bool is10bitDynamicRangeSupported = false;
263 for (size_t i = 0; i < entry.count; ++i) {
264 uint8_t capability = entry.data.u8[i];
265 if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) {
266 is10bitDynamicRangeSupported = true;
267 break;
268 }
269 }
270
271 if (!is10bitDynamicRangeSupported) {
272 return false;
273 }
274
275 switch (dynamicRangeProfile) {
276 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
277 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
278 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
279 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
280 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
281 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
282 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
283 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM:
284 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO:
285 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF:
286 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO:
287 entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP);
288 for (size_t i = 0; i < entry.count; i += 3) {
289 if (dynamicRangeProfile == entry.data.i64[i]) {
290 return true;
291 }
292 }
293
294 return false;
295 default:
296 return false;
297 }
298
299 return false;
300 }
301
302 //check if format is 10-bit compatible
is10bitDynamicRangeProfile(int64_t dynamicRangeProfile)303 bool is10bitDynamicRangeProfile(int64_t dynamicRangeProfile) {
304 switch (dynamicRangeProfile) {
305 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
306 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
307 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
308 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
309 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
310 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
311 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
312 return true;
313 default:
314 return false;
315 }
316 }
317
deviceReportsColorSpaces(const CameraMetadata & staticInfo)318 bool deviceReportsColorSpaces(const CameraMetadata& staticInfo) {
319 camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
320 for (size_t i = 0; i < entry.count; ++i) {
321 uint8_t capability = entry.data.u8[i];
322 if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES) {
323 return true;
324 }
325 }
326
327 return false;
328 }
329
isColorSpaceSupported(int32_t colorSpace,int32_t format,android_dataspace dataSpace,int64_t dynamicRangeProfile,const CameraMetadata & staticInfo)330 bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
331 int64_t dynamicRangeProfile, const CameraMetadata& staticInfo) {
332 int64_t colorSpace64 = colorSpace;
333 int64_t format64 = format;
334
335 // Translate HAL format + data space to public format
336 if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_V0_JFIF) {
337 format64 = 0x100; // JPEG
338 } else if (format == HAL_PIXEL_FORMAT_BLOB
339 && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) {
340 format64 = 0x48454946; // HEIC
341 } else if (format == HAL_PIXEL_FORMAT_BLOB
342 && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_DYNAMIC_DEPTH)) {
343 format64 = 0x69656963; // DEPTH_JPEG
344 } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_DEPTH) {
345 return false; // DEPTH_POINT_CLOUD, not applicable
346 } else if (format == HAL_PIXEL_FORMAT_Y16 && dataSpace == HAL_DATASPACE_DEPTH) {
347 return false; // DEPTH16, not applicable
348 } else if (format == HAL_PIXEL_FORMAT_RAW16 && dataSpace == HAL_DATASPACE_DEPTH) {
349 return false; // RAW_DEPTH, not applicable
350 } else if (format == HAL_PIXEL_FORMAT_RAW10 && dataSpace == HAL_DATASPACE_DEPTH) {
351 return false; // RAW_DEPTH10, not applicable
352 } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace ==
353 static_cast<android_dataspace>(
354 ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
355 format64 = static_cast<int64_t>(PublicFormat::JPEG_R);
356 } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace ==
357 static_cast<android_dataspace>(
358 ::aidl::android::hardware::graphics::common::Dataspace::HEIF_ULTRAHDR)) {
359 format64 = static_cast<int64_t>(PublicFormat::HEIC_ULTRAHDR);
360 }
361
362 camera_metadata_ro_entry_t entry =
363 staticInfo.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
364 for (size_t i = 0; i < entry.count; i += 3) {
365 bool isFormatCompatible = (format64 == entry.data.i64[i + 1]);
366 bool isDynamicProfileCompatible =
367 (dynamicRangeProfile & entry.data.i64[i + 2]) != 0;
368
369 if (colorSpace64 == entry.data.i64[i]
370 && isFormatCompatible
371 && isDynamicProfileCompatible) {
372 return true;
373 }
374 }
375
376 ALOGE("Color space %d, image format %" PRId64 ", and dynamic range 0x%" PRIx64
377 " combination not found", colorSpace, format64, dynamicRangeProfile);
378 return false;
379 }
380
isPublicFormat(int32_t format)381 bool isPublicFormat(int32_t format)
382 {
383 switch(format) {
384 case HAL_PIXEL_FORMAT_RGBA_8888:
385 case HAL_PIXEL_FORMAT_RGBX_8888:
386 case HAL_PIXEL_FORMAT_RGB_888:
387 case HAL_PIXEL_FORMAT_RGB_565:
388 case HAL_PIXEL_FORMAT_BGRA_8888:
389 case HAL_PIXEL_FORMAT_YV12:
390 case HAL_PIXEL_FORMAT_Y8:
391 case HAL_PIXEL_FORMAT_Y16:
392 case HAL_PIXEL_FORMAT_RAW16:
393 case HAL_PIXEL_FORMAT_RAW10:
394 case HAL_PIXEL_FORMAT_RAW12:
395 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
396 case HAL_PIXEL_FORMAT_BLOB:
397 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
398 case HAL_PIXEL_FORMAT_YCbCr_420_888:
399 case HAL_PIXEL_FORMAT_YCbCr_422_SP:
400 case HAL_PIXEL_FORMAT_YCrCb_420_SP:
401 case HAL_PIXEL_FORMAT_YCbCr_422_I:
402 return true;
403 default:
404 return false;
405 }
406 }
407
dataSpaceFromColorSpace(android_dataspace * dataSpace,int32_t colorSpace)408 bool dataSpaceFromColorSpace(android_dataspace *dataSpace, int32_t colorSpace) {
409 switch (colorSpace) {
410 case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB:
411 *dataSpace = HAL_DATASPACE_V0_SRGB;
412 return true;
413 case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3:
414 *dataSpace = HAL_DATASPACE_DISPLAY_P3;
415 return true;
416 case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG:
417 *(reinterpret_cast<int32_t*>(dataSpace)) = HAL_DATASPACE_BT2020_HLG;
418 return true;
419 default:
420 ALOGE("%s: Unsupported color space %d", __FUNCTION__, colorSpace);
421 return false;
422 }
423 }
424
isStreamUseCaseSupported(int64_t streamUseCase,const CameraMetadata & deviceInfo)425 bool isStreamUseCaseSupported(int64_t streamUseCase,
426 const CameraMetadata &deviceInfo) {
427 camera_metadata_ro_entry_t availableStreamUseCases =
428 deviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES);
429
430 if (availableStreamUseCases.count == 0 &&
431 streamUseCase == ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
432 return true;
433 }
434 // Allow vendor stream use case unconditionally.
435 if (streamUseCase >= ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START) {
436 return true;
437 }
438
439 for (size_t i = 0; i < availableStreamUseCases.count; i++) {
440 if (availableStreamUseCases.data.i64[i] == streamUseCase) {
441 return true;
442 }
443 }
444 return false;
445 }
446
/**
 * Validate an application-supplied output Surface and produce the configured
 * Surface together with its OutputStreamInfo.
 *
 * Queries the native window for width/height/format/dataspace, optionally
 * rounds the dimensions to the nearest supported stream configuration, and
 * validates sensor pixel modes, dynamic range profile, color space, stream
 * use case, timestamp base, and mirror mode against the (physical) camera's
 * static metadata.
 *
 * When isStreamInfoValid is false, streamInfo is populated from the queried
 * values. When it is true (surface sharing on an already-configured stream),
 * the queried values must match the existing streamInfo instead.
 *
 * Returns an error binder::Status on any validation failure; out_surface is
 * only meaningful on success.
 */
binder::Status createConfiguredSurface(
        OutputStreamInfo& streamInfo, bool isStreamInfoValid,
        sp<Surface>& out_surface, const sp<SurfaceType>& surface,
        const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
        const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
        int64_t streamUseCase, int timestampBase, int mirrorMode,
        int32_t colorSpace, bool respectSurfaceSize) {
    // bufferProducer must be non-null
    if ( flagtools::isSurfaceTypeValid(surface) == false ) {
        std::string msg = fmt::sprintf("Camera %s: Surface is NULL", logicalCameraId.c_str());
        ALOGW("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    // HACK b/10949105
    // Query consumer usage bits to set async operation mode for
    // GLConsumer using controlledByApp parameter.
    bool useAsync = false;
    uint64_t consumerUsage = 0;
    status_t err;
    if ((err = surface->getConsumerUsage(&consumerUsage)) != OK) {
        std::string msg = fmt::sprintf("Camera %s: Failed to query Surface consumer usage: %s (%d)",
                logicalCameraId.c_str(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
    }
    if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
        ALOGW("%s: Camera %s with consumer usage flag: %" PRIu64 ": Forcing asynchronous mode for"
                "stream", __FUNCTION__, logicalCameraId.c_str(), consumerUsage);
        useAsync = true;
    }

    // A "flexible" consumer (SW-readable / texture / composer, and not a video
    // encoder or RenderScript target) may have its dimensions rounded to the
    // nearest supported stream configuration further below.
    uint64_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
                              GRALLOC_USAGE_RENDERSCRIPT;
    uint64_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
                            GraphicBuffer::USAGE_HW_TEXTURE |
                            GraphicBuffer::USAGE_HW_COMPOSER;
    bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
            (consumerUsage & allowedFlags) != 0;

    out_surface = new Surface(flagtools::surfaceTypeToIGBP(surface), useAsync);

    // Query the surface's default geometry and dataspace via the
    // ANativeWindow interface.
    ANativeWindow *anw = out_surface.get();

    int width, height, format;
    android_dataspace dataSpace;
    if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
        std::string msg = fmt::sprintf("Camera %s: Failed to query Surface width: %s (%d)",
                logicalCameraId.c_str(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
    }
    if ((err = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
        std::string msg = fmt::sprintf("Camera %s: Failed to query Surface height: %s (%d)",
                logicalCameraId.c_str(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
    }
    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
        std::string msg = fmt::sprintf("Camera %s: Failed to query Surface format: %s (%d)",
                logicalCameraId.c_str(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
    }
    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
            reinterpret_cast<int*>(&dataSpace))) != OK) {
        std::string msg = fmt::sprintf("Camera %s: Failed to query Surface dataspace: %s (%d)",
                logicalCameraId.c_str(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
    }

    // For non-BLOB formats, an explicitly requested color space overrides the
    // surface's default dataspace.
    if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
            format != HAL_PIXEL_FORMAT_BLOB) {
        if (!dataSpaceFromColorSpace(&dataSpace, colorSpace)) {
            std::string msg = fmt::sprintf("Camera %s: color space %d not supported, failed to "
                    "convert to data space", logicalCameraId.c_str(), colorSpace);
            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
        }
    }

    // FIXME: remove this override since the default format should be
    //       IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
    if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
            ((consumerUsage & GRALLOC_USAGE_HW_MASK) &&
             ((consumerUsage & GRALLOC_USAGE_SW_READ_MASK) == 0))) {
        ALOGW("%s: Camera %s: Overriding format %#x to IMPLEMENTATION_DEFINED",
                __FUNCTION__, logicalCameraId.c_str(), format);
        format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    }
    // Validate (and possibly override) the requested sensor pixel modes for
    // this stream against the static metadata.
    std::unordered_set<int32_t> overriddenSensorPixelModes;
    if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed, format, width, height,
            physicalCameraMetadata, &overriddenSensorPixelModes) != OK) {
        std::string msg = fmt::sprintf("Camera %s: sensor pixel modes for stream with "
                "format %#x are not valid",logicalCameraId.c_str(), format);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    bool foundInMaxRes = false;
    if (overriddenSensorPixelModes.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
            overriddenSensorPixelModes.end()) {
        // we can use the default stream configuration map
        foundInMaxRes = true;
    }
    // Round dimensions to the nearest dimensions available for this format.
    // Only do the rounding if the client doesn't ask to respect the surface
    // size.
    if (flexibleConsumer && isPublicFormat(format) && !respectSurfaceSize &&
            !SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
            format, dataSpace, physicalCameraMetadata, foundInMaxRes, /*out*/&width,
            /*out*/&height)) {
        std::string msg = fmt::sprintf("Camera %s: No supported stream configurations with "
                "format %#x defined, failed to create output stream",
                logicalCameraId.c_str(), format);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    // Validate dynamic range profile, 10-bit format compatibility, color
    // space, stream use case, timestamp base, and mirror mode in turn.
    if (!SessionConfigurationUtils::isDynamicRangeProfileSupported(dynamicRangeProfile,
                physicalCameraMetadata)) {
        std::string msg = fmt::sprintf("Camera %s: Dynamic range profile 0x%" PRIx64
                " not supported,failed to create output stream", logicalCameraId.c_str(),
                dynamicRangeProfile);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    if (SessionConfigurationUtils::is10bitDynamicRangeProfile(dynamicRangeProfile) &&
            !SessionConfigurationUtils::is10bitCompatibleFormat(format, dataSpace)) {
        std::string msg = fmt::sprintf("Camera %s: No 10-bit supported stream configurations with "
                "format %#x defined and profile %" PRIx64 ", failed to create output stream",
                logicalCameraId.c_str(), format, dynamicRangeProfile);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
            SessionConfigurationUtils::deviceReportsColorSpaces(physicalCameraMetadata) &&
            !SessionConfigurationUtils::isColorSpaceSupported(colorSpace, format, dataSpace,
                    dynamicRangeProfile, physicalCameraMetadata)) {
        std::string msg = fmt::sprintf("Camera %s: Color space %d not supported, failed to "
                "create output stream (pixel format %d dynamic range profile %" PRId64 ")",
                logicalCameraId.c_str(), colorSpace, format, dynamicRangeProfile);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
            physicalCameraMetadata)) {
        std::string msg = fmt::sprintf("Camera %s: stream use case %" PRId64 " not supported,"
                " failed to create output stream", logicalCameraId.c_str(), streamUseCase);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    if (timestampBase < OutputConfiguration::TIMESTAMP_BASE_DEFAULT ||
            timestampBase > OutputConfiguration::TIMESTAMP_BASE_MAX) {
        std::string msg = fmt::sprintf("Camera %s: invalid timestamp base %d",
                logicalCameraId.c_str(), timestampBase);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    if (mirrorMode < OutputConfiguration::MIRROR_MODE_AUTO ||
            mirrorMode > OutputConfiguration::MIRROR_MODE_V) {
        std::string msg = fmt::sprintf("Camera %s: invalid mirroring mode %d",
                logicalCameraId.c_str(), mirrorMode);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }

    // First surface for this stream: record the validated values.
    if (!isStreamInfoValid) {
        streamInfo.width = width;
        streamInfo.height = height;
        streamInfo.format = format;
        streamInfo.dataSpace = dataSpace;
        streamInfo.consumerUsage = consumerUsage;
        streamInfo.sensorPixelModesUsed = overriddenSensorPixelModes;
        streamInfo.dynamicRangeProfile = dynamicRangeProfile;
        streamInfo.streamUseCase = streamUseCase;
        streamInfo.timestampBase = timestampBase;
        streamInfo.colorSpace = colorSpace;
        return binder::Status::ok();
    }
    // Shared surface: the queried values must match the stream's existing
    // configuration.
    if (width != streamInfo.width) {
        std::string msg = fmt::sprintf("Camera %s:Surface width doesn't match: %d vs %d",
                logicalCameraId.c_str(), width, streamInfo.width);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    if (height != streamInfo.height) {
        std::string msg = fmt::sprintf("Camera %s:Surface height doesn't match: %d vs %d",
                 logicalCameraId.c_str(), height, streamInfo.height);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    if (format != streamInfo.format) {
        std::string msg = fmt::sprintf("Camera %s:Surface format doesn't match: %d vs %d",
                 logicalCameraId.c_str(), format, streamInfo.format);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        if (dataSpace != streamInfo.dataSpace) {
            std::string msg = fmt::sprintf("Camera %s:Surface dataSpace doesn't match: %d vs %d",
                    logicalCameraId.c_str(), static_cast<int>(dataSpace), static_cast<int>(streamInfo.dataSpace));
            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
        }
        //At the native side, there isn't a way to check whether 2 surfaces come from the same
        //surface class type. Use usage flag to approximate the comparison.
        if (consumerUsage != streamInfo.consumerUsage) {
            std::string msg = fmt::sprintf(
                    "Camera %s:Surface usage flag doesn't match %" PRIu64 " vs %" PRIu64 "",
                    logicalCameraId.c_str(), consumerUsage, streamInfo.consumerUsage);
            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
        }
    }
    return binder::Status::ok();
}
662
mapStreamInfo(const OutputStreamInfo & streamInfo,camera3::camera_stream_rotation_t rotation,const std::string & physicalId,int32_t groupId,aidl::android::hardware::camera::device::Stream * stream)663 void mapStreamInfo(const OutputStreamInfo &streamInfo,
664 camera3::camera_stream_rotation_t rotation, const std::string &physicalId,
665 int32_t groupId, aidl::android::hardware::camera::device::Stream *stream /*out*/) {
666 if (stream == nullptr) {
667 return;
668 }
669
670 stream->streamType = aidl::android::hardware::camera::device::StreamType::OUTPUT;
671 stream->width = streamInfo.width;
672 stream->height = streamInfo.height;
673 stream->format = AidlCamera3Device::mapToAidlPixelFormat(streamInfo.format);
674 auto u = streamInfo.consumerUsage;
675 camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
676 stream->usage = AidlCamera3Device::mapToAidlConsumerUsage(u);
677 stream->dataSpace = AidlCamera3Device::mapToAidlDataspace(streamInfo.dataSpace);
678 stream->colorSpace = streamInfo.colorSpace;
679 stream->rotation = AidlCamera3Device::mapToAidlStreamRotation(rotation);
680 stream->id = -1; // Invalid stream id
681 stream->physicalCameraId = physicalId;
682 stream->bufferSize = 0;
683 stream->groupId = groupId;
684 stream->sensorPixelModesUsed.resize(streamInfo.sensorPixelModesUsed.size());
685 size_t idx = 0;
686 using SensorPixelMode = aidl::android::hardware::camera::metadata::SensorPixelMode;
687 for (auto mode : streamInfo.sensorPixelModesUsed) {
688 stream->sensorPixelModesUsed[idx++] =
689 static_cast<SensorPixelMode>(mode);
690 }
691 using DynamicRangeProfile =
692 aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
693 stream->dynamicRangeProfile = static_cast<DynamicRangeProfile>(streamInfo.dynamicRangeProfile);
694 using StreamUseCases =
695 aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases;
696 stream->useCase = static_cast<StreamUseCases>(streamInfo.streamUseCase);
697 }
698
/**
 * Map one client output stream into the HAL stream configuration, expanding
 * composite stream types (depth, HEIC, Jpeg/R) into the internal camera
 * streams they require.
 *
 * Writes one or more entries into streamConfiguration.streams starting at
 * *streamIdx and advances *streamIdx by the number of entries written. The
 * caller must have already reserved one slot for this stream; when a
 * composite stream expands to more than one internal stream, the extra slots
 * are appended here.
 *
 * On success *earlyExit is left untouched unless the composite stream is
 * unsupported (no internal streams), in which case *earlyExit is set to true
 * and an OK status is returned so the caller can abort the conversion.
 */
binder::Status mapStream(
        const OutputStreamInfo& streamInfo, bool isCompositeJpegRDisabled,
        bool isCompositeHeicDisabled, bool isCompositeHeicUltraHDRDisabled,
        const CameraMetadata& deviceInfo, camera_stream_rotation_t rotation,
        size_t* streamIdx /*out*/, const std::string& physicalId, int32_t groupId,
        const std::string& logicalCameraId,
        aidl::android::hardware::camera::device::StreamConfiguration& streamConfiguration /*out*/,
        bool* earlyExit /*out*/) {
    // Classify the stream: each composite type has its own detection helper,
    // and Jpeg/R additionally honors the global disable flag.
    bool isDepthCompositeStream =
            camera3::DepthCompositeStream::isDepthCompositeStreamInfo(streamInfo);
    bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStreamInfo(
            streamInfo, isCompositeHeicDisabled, isCompositeHeicUltraHDRDisabled);
    bool isJpegRCompositeStream =
            camera3::JpegRCompositeStream::isJpegRCompositeStreamInfo(streamInfo) &&
            !isCompositeJpegRDisabled;
    if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
        // We need to take in to account that composite streams can have
        // additional internal camera streams.
        std::vector<OutputStreamInfo> compositeStreams;
        status_t ret;
        if (isDepthCompositeStream) {
            // TODO: Take care of composite streams.
            ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
                    deviceInfo, &compositeStreams);
        } else if (isHeicCompositeStream) {
            ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
                    deviceInfo, &compositeStreams);
        } else {
            ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
                    deviceInfo, &compositeStreams);
        }

        if (ret != OK) {
            std::string msg = fmt::sprintf(
                    "Camera %s: Failed adding composite streams: %s (%d)",
                    logicalCameraId.c_str(), strerror(-ret), ret);
            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
        }

        if (compositeStreams.size() == 0) {
            // No internal streams means composite stream not
            // supported.
            *earlyExit = true;
            return binder::Status::ok();
        } else if (compositeStreams.size() > 1) {
            // The caller reserved exactly one slot for this stream; grow the
            // stream list by the additional internal streams beyond that slot.
            size_t streamCount = streamConfiguration.streams.size() + compositeStreams.size() - 1;
            streamConfiguration.streams.resize(streamCount);
        }

        // Emit one HAL stream per internal stream, consuming consecutive slots.
        for (const auto& compositeStream : compositeStreams) {
            mapStreamInfo(compositeStream, rotation,
                    physicalId, groupId,
                    &streamConfiguration.streams[(*streamIdx)++]);
        }
    } else {
        // Simple (non-composite) stream: fill exactly the reserved slot.
        mapStreamInfo(streamInfo, rotation,
                physicalId, groupId, &streamConfiguration.streams[(*streamIdx)++]);
    }

    return binder::Status::ok();
}
761
/**
 * Convert a client-facing SessionConfiguration into the AIDL
 * StreamConfiguration consumed by the camera HAL.
 *
 * Builds, in order: the optional input stream (when input width/height/format
 * are all positive), then one or more HAL output streams per
 * OutputConfiguration (composite streams may expand to several), and finally
 * the filtered session parameters when checkSessionParams is set.
 *
 * @param getMetadata callback returning the static metadata for a given
 *        (physical) camera id; used to validate per-physical-camera streams.
 * @param earlyExit must be non-null; set to true when an unsupported
 *        composite stream makes the whole combination unsupported (the
 *        returned status is then OK and the caller should stop).
 */
binder::Status convertToHALStreamCombination(
        const SessionConfiguration& sessionConfiguration, const std::string& logicalCameraId,
        const CameraMetadata& deviceInfo, bool isCompositeJpegRDisabled,
        bool isCompositeHeicDisabled, bool isCompositeHeicUltraHDRDisabled,
        metadataGetter getMetadata, const std::vector<std::string>& physicalCameraIds,
        aidl::android::hardware::camera::device::StreamConfiguration& streamConfiguration,
        bool overrideForPerfClass, metadata_vendor_id_t vendorTagId, bool checkSessionParams,
        const std::vector<int32_t>& additionalKeys, bool* earlyExit) {
    using SensorPixelMode = aidl::android::hardware::camera::metadata::SensorPixelMode;
    // Reject invalid / unsupported operating modes up front.
    auto operatingMode = sessionConfiguration.getOperatingMode();
    binder::Status res = checkOperatingMode(operatingMode, deviceInfo,
            logicalCameraId);
    if (!res.isOk()) {
        return res;
    }

    if (earlyExit == nullptr) {
        std::string msg("earlyExit nullptr");
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    *earlyExit = false;
    auto ret = AidlCamera3Device::mapToAidlStreamConfigurationMode(
            static_cast<camera_stream_configuration_mode_t> (operatingMode),
            /*out*/ &streamConfiguration.operationMode);
    if (ret != OK) {
        std::string msg = fmt::sprintf(
                "Camera %s: Failed mapping operating mode %d requested: %s (%d)",
                logicalCameraId.c_str(), operatingMode, strerror(-ret), ret);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                msg.c_str());
    }

    // A reprocessing input stream is present only when all three of
    // width/height/format are positive.
    bool isInputValid = (sessionConfiguration.getInputWidth() > 0) &&
            (sessionConfiguration.getInputHeight() > 0) &&
            (sessionConfiguration.getInputFormat() > 0);
    auto outputConfigs = sessionConfiguration.getOutputConfigurations();
    // Pre-reserve one HAL stream slot per output config (plus the input
    // stream); composite streams grow this list further inside mapStream.
    size_t streamCount = outputConfigs.size();
    streamCount = isInputValid ? streamCount + 1 : streamCount;
    streamConfiguration.streams.resize(streamCount);
    size_t streamIdx = 0;
    if (isInputValid) {
        // Input streams always use the default sensor pixel mode.
        std::vector<SensorPixelMode> defaultSensorPixelModes;
        defaultSensorPixelModes.resize(1);
        defaultSensorPixelModes[0] =
                static_cast<SensorPixelMode>(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
        aidl::android::hardware::camera::device::Stream stream;
        stream.id = 0;
        stream.streamType = aidl::android::hardware::camera::device::StreamType::INPUT;
        stream.width = static_cast<uint32_t> (sessionConfiguration.getInputWidth());
        stream.height = static_cast<uint32_t> (sessionConfiguration.getInputHeight());
        stream.format =
                AidlCamera3Device::AidlCamera3Device::mapToAidlPixelFormat(
                        sessionConfiguration.getInputFormat());
        stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0);
        stream.dataSpace =
                static_cast<aidl::android::hardware::graphics::common::Dataspace>(
                        HAL_DATASPACE_UNKNOWN);
        stream.rotation = aidl::android::hardware::camera::device::StreamRotation::ROTATION_0;
        stream.bufferSize = 0;
        stream.groupId = -1;
        stream.sensorPixelModesUsed = defaultSensorPixelModes;
        using DynamicRangeProfile =
                aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
        stream.dynamicRangeProfile =
                DynamicRangeProfile::ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
        streamConfiguration.streams[streamIdx++] = stream;
        streamConfiguration.multiResolutionInputImage =
                sessionConfiguration.inputIsMultiResolution();
    }

    for (const auto &it : outputConfigs) {
        const std::vector<ParcelableSurfaceType>& surfaces = it.getSurfaces();
        bool deferredConsumer = it.isDeferred();
        bool isConfigurationComplete = it.isComplete();
        const std::string &physicalCameraId = it.getPhysicalCameraId();

        int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
        int32_t colorSpace = it.getColorSpace();
        std::vector<int32_t> sensorPixelModesUsed = it.getSensorPixelModesUsed();
        // Validate against the physical camera's static metadata when this
        // output targets a specific physical camera; otherwise use the
        // logical camera's metadata.
        const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId,
                overrideForPerfClass);
        const CameraMetadata &metadataChosen =
                physicalCameraId.size() > 0 ? physicalDeviceInfo : deviceInfo;

        size_t numSurfaces = surfaces.size();
        bool isStreamInfoValid = false;
        int32_t groupId = it.isMultiResolution() ? it.getSurfaceSetID() : -1;
        OutputStreamInfo streamInfo;

        res = checkSurfaceType(numSurfaces, deferredConsumer, it.getSurfaceType(),
                isConfigurationComplete);
        if (!res.isOk()) {
            return res;
        }
        res = checkPhysicalCameraId(physicalCameraIds, physicalCameraId,
                logicalCameraId);
        if (!res.isOk()) {
            return res;
        }

        int64_t streamUseCase = it.getStreamUseCase();
        int timestampBase = it.getTimestampBase();
        // If the configuration is a deferred consumer, or a not yet completed
        // configuration with no buffer producers attached.
        if (deferredConsumer || (!isConfigurationComplete && numSurfaces == 0)) {
            streamInfo.width = it.getWidth();
            streamInfo.height = it.getHeight();
            // No surface to query, so derive format/usage/dataspace from the
            // declared surface type instead.
            auto surfaceType = it.getSurfaceType();
            switch (surfaceType) {
                case OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE:
                    streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
                    streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
                    streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
                    break;
                case OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW:
                    streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE
                            | GraphicBuffer::USAGE_HW_COMPOSER;
                    streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
                    streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
                    break;
                case OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER:
                case OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC:
                    streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_VIDEO_ENCODER;
                    streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
                    streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
                    break;
                case OutputConfiguration::SURFACE_TYPE_IMAGE_READER:
                    streamInfo.consumerUsage = it.getUsage();
                    streamInfo.format = it.getFormat();
                    streamInfo.dataSpace = (android_dataspace)it.getDataspace();
                    break;
                default:
                    return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                            "Invalid surface type.");
            }
            streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
            if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
                    streamInfo.format, streamInfo.width,
                    streamInfo.height, metadataChosen,
                    &streamInfo.sensorPixelModesUsed) != OK) {
                ALOGE("%s: Deferred surface sensor pixel modes not valid",
                        __FUNCTION__);
                return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                        "Deferred surface sensor pixel modes not valid");
            }
            streamInfo.streamUseCase = streamUseCase;
            auto status = mapStream(streamInfo, isCompositeJpegRDisabled, isCompositeHeicDisabled,
                    isCompositeHeicUltraHDRDisabled, deviceInfo,
                    camera3::CAMERA_STREAM_ROTATION_0, &streamIdx, physicalCameraId,
                    groupId, logicalCameraId, streamConfiguration, earlyExit);
            if (*earlyExit || !status.isOk()) {
                return status;
            }

            isStreamInfoValid = true;

            if (numSurfaces == 0) {
                continue;
            }
        }

        // Each attached surface is validated and (once per output config)
        // mapped into the HAL stream list.
        for (auto& surface_type : surfaces) {
            sp<Surface> surface;
            int mirrorMode = it.getMirrorMode(surface_type);
            res = createConfiguredSurface(streamInfo, isStreamInfoValid, surface,
                    flagtools::convertParcelableSurfaceTypeToSurface(surface_type),
                    logicalCameraId, metadataChosen, sensorPixelModesUsed,
                    dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode,
                    colorSpace, /*respectSurfaceSize*/ true);

            if (!res.isOk()) return res;

            if (!isStreamInfoValid) {
                auto status = mapStream(
                        streamInfo, isCompositeJpegRDisabled, isCompositeHeicDisabled,
                        isCompositeHeicUltraHDRDisabled, deviceInfo,
                        static_cast<camera_stream_rotation_t>(it.getRotation()), &streamIdx,
                        physicalCameraId, groupId, logicalCameraId, streamConfiguration, earlyExit);
                if (*earlyExit || !status.isOk()) {
                    return status;
                }
                isStreamInfoValid = true;
            }
        }
    }

    if (checkSessionParams) {
        // Filter the client's session parameters down to the advertised
        // session keys (plus any additional tags) and attach them raw.
        const CameraMetadata &deviceInfo = getMetadata(logicalCameraId,
                /*overrideForPerfClass*/false);
        CameraMetadata filteredParams;

        filterParameters(sessionConfiguration.getSessionParameters(), deviceInfo,
                additionalKeys, vendorTagId, filteredParams);

        camera_metadata_t* metadata = const_cast<camera_metadata_t*>(filteredParams.getAndLock());
        uint8_t *metadataP = reinterpret_cast<uint8_t*>(metadata);
        streamConfiguration.sessionParams.metadata.assign(metadataP,
                metadataP + get_camera_metadata_size(metadata));
    }

    return binder::Status::ok();
}
966
checkPhysicalCameraId(const std::vector<std::string> & physicalCameraIds,const std::string & physicalCameraId,const std::string & logicalCameraId)967 binder::Status checkPhysicalCameraId(
968 const std::vector<std::string> &physicalCameraIds, const std::string &physicalCameraId,
969 const std::string &logicalCameraId) {
970 if (physicalCameraId.size() == 0) {
971 return binder::Status::ok();
972 }
973 if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(),
974 physicalCameraId) == physicalCameraIds.end()) {
975 std::string msg = fmt::sprintf("Camera %s: Camera doesn't support physicalCameraId %s.",
976 logicalCameraId.c_str(), physicalCameraId.c_str());
977 ALOGE("%s: %s", __FUNCTION__, msg.c_str());
978 return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
979 }
980 return binder::Status::ok();
981 }
982
checkSurfaceType(size_t numBufferProducers,bool deferredConsumer,int surfaceType,bool isConfigurationComplete)983 binder::Status checkSurfaceType(size_t numBufferProducers,
984 bool deferredConsumer, int surfaceType, bool isConfigurationComplete) {
985 if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
986 ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
987 __FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
988 return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
989 } else if ((numBufferProducers == 0) && (!deferredConsumer) && isConfigurationComplete) {
990 ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
991 return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
992 }
993
994 if (deferredConsumer) {
995 bool validSurfaceType = (
996 (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
997 (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
998 if (!validSurfaceType) {
999 std::string msg = fmt::sprintf("Deferred target surface has invalid "
1000 "surfaceType = %d.", surfaceType);
1001 ALOGE("%s: %s", __FUNCTION__, msg.c_str());
1002 return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
1003 }
1004 } else if (!isConfigurationComplete && numBufferProducers == 0) {
1005 bool validSurfaceType = (
1006 (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER) ||
1007 (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC) ||
1008 (surfaceType == OutputConfiguration::SURFACE_TYPE_IMAGE_READER));
1009 if (!validSurfaceType) {
1010 std::string msg = fmt::sprintf("OutputConfiguration target surface has invalid "
1011 "surfaceType = %d.", surfaceType);
1012 ALOGE("%s: %s", __FUNCTION__, msg.c_str());
1013 return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
1014 }
1015 }
1016
1017 return binder::Status::ok();
1018 }
1019
checkOperatingMode(int operatingMode,const CameraMetadata & staticInfo,const std::string & cameraId)1020 binder::Status checkOperatingMode(int operatingMode,
1021 const CameraMetadata &staticInfo, const std::string &cameraId) {
1022 if (operatingMode < 0) {
1023 std::string msg = fmt::sprintf(
1024 "Camera %s: Invalid operating mode %d requested", cameraId.c_str(), operatingMode);
1025 ALOGE("%s: %s", __FUNCTION__, msg.c_str());
1026 return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
1027 msg.c_str());
1028 }
1029
1030 bool isConstrainedHighSpeed = (operatingMode == ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE);
1031 if (isConstrainedHighSpeed) {
1032 camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
1033 bool isConstrainedHighSpeedSupported = false;
1034 for(size_t i = 0; i < entry.count; ++i) {
1035 uint8_t capability = entry.data.u8[i];
1036 if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) {
1037 isConstrainedHighSpeedSupported = true;
1038 break;
1039 }
1040 }
1041 if (!isConstrainedHighSpeedSupported) {
1042 std::string msg = fmt::sprintf(
1043 "Camera %s: Try to create a constrained high speed configuration on a device"
1044 " that doesn't support it.", cameraId.c_str());
1045 ALOGE("%s: %s", __FUNCTION__, msg.c_str());
1046 return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
1047 msg.c_str());
1048 }
1049 }
1050
1051 return binder::Status::ok();
1052 }
1053
// Returns true when an OUTPUT stream configuration with the exact
// format/width/height exists in the given stream configuration map.
static bool inStreamConfigurationMap(int format, int width, int height,
        const std::unordered_map<int, std::vector<camera3::StreamConfiguration>> &sm) {
    const auto entry = sm.find(format);
    if (entry == sm.end()) {
        return false;
    }
    for (const auto &config : entry->second) {
        bool sizeMatches = (config.width == width) && (config.height == height);
        // isInput == 0 restricts the match to output configurations.
        if (sizeMatches && config.isInput == 0) {
            return true;
        }
    }
    return false;
}
1067
// De-duplicate the client-provided sensor pixel mode list into a set.
static std::unordered_set<int32_t> convertToSet(const std::vector<int32_t> &sensorPixelModesUsed) {
    std::unordered_set<int32_t> modes;
    modes.insert(sensorPixelModesUsed.begin(), sensorPixelModesUsed.end());
    return modes;
}
1071
checkAndOverrideSensorPixelModesUsed(const std::vector<int32_t> & sensorPixelModesUsed,int format,int width,int height,const CameraMetadata & staticInfo,std::unordered_set<int32_t> * overriddenSensorPixelModesUsed)1072 status_t checkAndOverrideSensorPixelModesUsed(
1073 const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
1074 const CameraMetadata &staticInfo,
1075 std::unordered_set<int32_t> *overriddenSensorPixelModesUsed) {
1076
1077 const std::unordered_set<int32_t> &sensorPixelModesUsedSet =
1078 convertToSet(sensorPixelModesUsed);
1079 if (!supportsUltraHighResolutionCapture(staticInfo)) {
1080 if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
1081 sensorPixelModesUsedSet.end()) {
1082 // invalid value for non ultra high res sensors
1083 ALOGE("%s ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION used on a device which doesn't "
1084 "support ultra high resolution capture", __FUNCTION__);
1085 return BAD_VALUE;
1086 }
1087 overriddenSensorPixelModesUsed->clear();
1088 overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
1089 return OK;
1090 }
1091
1092 StreamConfigurationPair streamConfigurationPair = getStreamConfigurationPair(staticInfo);
1093
1094 bool isInDefaultStreamConfigurationMap =
1095 inStreamConfigurationMap(format, width, height,
1096 streamConfigurationPair.mDefaultStreamConfigurationMap);
1097
1098 bool isInMaximumResolutionStreamConfigurationMap =
1099 inStreamConfigurationMap(format, width, height,
1100 streamConfigurationPair.mMaximumResolutionStreamConfigurationMap);
1101
1102 // Case 1: The client has not changed the sensor mode defaults. In this case, we check if the
1103 // size + format of the OutputConfiguration is found exclusively in 1.
1104 // If yes, add that sensorPixelMode to overriddenSensorPixelModes.
1105 // If no, add 'DEFAULT' and MAXIMUM_RESOLUTION to overriddenSensorPixelModes.
1106 // This maintains backwards compatibility and also tells the framework the stream
1107 // might be used in either sensor pixel mode.
1108 if (sensorPixelModesUsedSet.size() == 0) {
1109 // Ambiguous case, override to include both cases.
1110 if (isInDefaultStreamConfigurationMap && isInMaximumResolutionStreamConfigurationMap) {
1111 overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
1112 overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
1113 return OK;
1114 }
1115 if (isInMaximumResolutionStreamConfigurationMap) {
1116 overriddenSensorPixelModesUsed->insert(
1117 ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
1118 } else {
1119 overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
1120 }
1121 return OK;
1122 }
1123
1124 // Case2: The app has set sensorPixelModesUsed, we need to verify that they
1125 // are valid / err out.
1126 if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_DEFAULT) !=
1127 sensorPixelModesUsedSet.end() && !isInDefaultStreamConfigurationMap) {
1128 ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_DEFAULT set by client, but stream f: %d size %d x %d"
1129 " isn't present in default stream configuration map", __FUNCTION__, format, width,
1130 height);
1131 return BAD_VALUE;
1132 }
1133
1134 if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
1135 sensorPixelModesUsedSet.end() && !isInMaximumResolutionStreamConfigurationMap) {
1136 ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION set by client, but stream f: "
1137 "%d size %d x %d isn't present in default stream configuration map", __FUNCTION__,
1138 format, width, height);
1139 return BAD_VALUE;
1140 }
1141 *overriddenSensorPixelModesUsed = sensorPixelModesUsedSet;
1142 return OK;
1143 }
1144
targetPerfClassPrimaryCamera(const std::set<std::string> & perfClassPrimaryCameraIds,const std::string & cameraId,int targetSdkVersion)1145 bool targetPerfClassPrimaryCamera(
1146 const std::set<std::string>& perfClassPrimaryCameraIds, const std::string& cameraId,
1147 int targetSdkVersion) {
1148 bool isPerfClassPrimaryCamera =
1149 perfClassPrimaryCameraIds.find(cameraId) != perfClassPrimaryCameraIds.end();
1150 return targetSdkVersion >= SDK_VERSION_S && isPerfClassPrimaryCamera;
1151 }
1152
mapRequestTemplateFromClient(const std::string & cameraId,int templateId,camera_request_template_t * tempId)1153 binder::Status mapRequestTemplateFromClient(const std::string& cameraId, int templateId,
1154 camera_request_template_t* tempId /*out*/) {
1155 binder::Status ret = binder::Status::ok();
1156
1157 if (tempId == nullptr) {
1158 ret = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
1159 "Camera %s: Invalid template argument", cameraId.c_str());
1160 return ret;
1161 }
1162 switch(templateId) {
1163 case ICameraDeviceUser::TEMPLATE_PREVIEW:
1164 *tempId = camera_request_template_t::CAMERA_TEMPLATE_PREVIEW;
1165 break;
1166 case ICameraDeviceUser::TEMPLATE_RECORD:
1167 *tempId = camera_request_template_t::CAMERA_TEMPLATE_VIDEO_RECORD;
1168 break;
1169 case ICameraDeviceUser::TEMPLATE_STILL_CAPTURE:
1170 *tempId = camera_request_template_t::CAMERA_TEMPLATE_STILL_CAPTURE;
1171 break;
1172 case ICameraDeviceUser::TEMPLATE_VIDEO_SNAPSHOT:
1173 *tempId = camera_request_template_t::CAMERA_TEMPLATE_VIDEO_SNAPSHOT;
1174 break;
1175 case ICameraDeviceUser::TEMPLATE_ZERO_SHUTTER_LAG:
1176 *tempId = camera_request_template_t::CAMERA_TEMPLATE_ZERO_SHUTTER_LAG;
1177 break;
1178 case ICameraDeviceUser::TEMPLATE_MANUAL:
1179 *tempId = camera_request_template_t::CAMERA_TEMPLATE_MANUAL;
1180 break;
1181 default:
1182 ret = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
1183 "Camera %s: Template ID %d is invalid or not supported",
1184 cameraId.c_str(), templateId);
1185 return ret;
1186 }
1187
1188 return ret;
1189 }
1190
mapRequestTemplateToAidl(camera_request_template_t templateId,RequestTemplate * id)1191 status_t mapRequestTemplateToAidl(camera_request_template_t templateId,
1192 RequestTemplate* id /*out*/) {
1193 switch (templateId) {
1194 case CAMERA_TEMPLATE_PREVIEW:
1195 *id = RequestTemplate::PREVIEW;
1196 break;
1197 case CAMERA_TEMPLATE_STILL_CAPTURE:
1198 *id = RequestTemplate::STILL_CAPTURE;
1199 break;
1200 case CAMERA_TEMPLATE_VIDEO_RECORD:
1201 *id = RequestTemplate::VIDEO_RECORD;
1202 break;
1203 case CAMERA_TEMPLATE_VIDEO_SNAPSHOT:
1204 *id = RequestTemplate::VIDEO_SNAPSHOT;
1205 break;
1206 case CAMERA_TEMPLATE_ZERO_SHUTTER_LAG:
1207 *id = RequestTemplate::ZERO_SHUTTER_LAG;
1208 break;
1209 case CAMERA_TEMPLATE_MANUAL:
1210 *id = RequestTemplate::MANUAL;
1211 break;
1212 default:
1213 // Unknown template ID, or this HAL is too old to support it
1214 return BAD_VALUE;
1215 }
1216 return OK;
1217 }
1218
filterParameters(const CameraMetadata & src,const CameraMetadata & deviceInfo,const std::vector<int32_t> & additionalTags,metadata_vendor_id_t vendorTagId,CameraMetadata & dst)1219 void filterParameters(const CameraMetadata& src, const CameraMetadata& deviceInfo,
1220 const std::vector<int32_t>& additionalTags, metadata_vendor_id_t vendorTagId,
1221 CameraMetadata& dst) {
1222 const CameraMetadata params(src);
1223 camera_metadata_ro_entry_t availableSessionKeys = deviceInfo.find(
1224 ANDROID_REQUEST_AVAILABLE_SESSION_KEYS);
1225 CameraMetadata filteredParams(availableSessionKeys.count);
1226 camera_metadata_t *meta = const_cast<camera_metadata_t *>(
1227 filteredParams.getAndLock());
1228 set_camera_metadata_vendor_id(meta, vendorTagId);
1229 filteredParams.unlock(meta);
1230
1231 std::unordered_set<int32_t> filteredTags(availableSessionKeys.data.i32,
1232 availableSessionKeys.data.i32 + availableSessionKeys.count);
1233 filteredTags.insert(additionalTags.begin(), additionalTags.end());
1234 for (int32_t tag : filteredTags) {
1235 camera_metadata_ro_entry entry = params.find(tag);
1236 if (entry.count > 0) {
1237 filteredParams.update(entry);
1238 }
1239 }
1240 dst = std::move(filteredParams);
1241 }
1242
overrideDefaultRequestKeys(CameraMetadata * request)1243 status_t overrideDefaultRequestKeys(CameraMetadata *request) {
1244 // Override the template request with ZoomRatioMapper
1245 status_t res = ZoomRatioMapper::initZoomRatioInTemplate(request);
1246 if (res != OK) {
1247 ALOGE("Failed to update zoom ratio: %s (%d)", strerror(-res), res);
1248 return res;
1249 }
1250
1251 // Fill in JPEG_QUALITY if not available
1252 if (!request->exists(ANDROID_JPEG_QUALITY)) {
1253 static const uint8_t kDefaultJpegQuality = 95;
1254 request->update(ANDROID_JPEG_QUALITY, &kDefaultJpegQuality, 1);
1255 }
1256
1257 // Fill in AUTOFRAMING if not available
1258 if (!request->exists(ANDROID_CONTROL_AUTOFRAMING)) {
1259 static const uint8_t kDefaultAutoframingMode = ANDROID_CONTROL_AUTOFRAMING_OFF;
1260 request->update(ANDROID_CONTROL_AUTOFRAMING, &kDefaultAutoframingMode, 1);
1261 }
1262
1263 return OK;
1264 }
1265
1266 } // namespace SessionConfigurationUtils
1267 } // namespace camera3
1268 } // namespace android
1269