1 /*
2 * Copyright (C) 2020 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include <cutils/properties.h>
18
19 #include "SessionConfigurationUtils.h"
20 #include "../api2/DepthCompositeStream.h"
21 #include "../api2/HeicCompositeStream.h"
22 #include "aidl/android/hardware/graphics/common/Dataspace.h"
23 #include "api2/JpegRCompositeStream.h"
24 #include "binder/Status.h"
25 #include "common/CameraDeviceBase.h"
26 #include "common/HalConversionsTemplated.h"
27 #include "../CameraService.h"
28 #include "device3/aidl/AidlCamera3Device.h"
29 #include "device3/hidl/HidlCamera3Device.h"
30 #include "device3/Camera3OutputStream.h"
31 #include "device3/ZoomRatioMapper.h"
32 #include "system/graphics-base-v1.1.h"
33 #include <camera/StringUtils.h>
34 #include <ui/PublicFormat.h>
35
36 using android::camera3::OutputStreamInfo;
37 using android::camera3::OutputStreamInfo;
38 using android::hardware::camera2::ICameraDeviceUser;
39 using aidl::android::hardware::camera::device::RequestTemplate;
40
41 namespace android {
42 namespace camera3 {
43
getStreamConfigurations(const CameraMetadata & staticInfo,int configuration,std::unordered_map<int,std::vector<StreamConfiguration>> * scm)44 void StreamConfiguration::getStreamConfigurations(
45 const CameraMetadata &staticInfo, int configuration,
46 std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
47 if (scm == nullptr) {
48 ALOGE("%s: StreamConfigurationMap nullptr", __FUNCTION__);
49 return;
50 }
51 const int STREAM_FORMAT_OFFSET = 0;
52 const int STREAM_WIDTH_OFFSET = 1;
53 const int STREAM_HEIGHT_OFFSET = 2;
54 const int STREAM_IS_INPUT_OFFSET = 3;
55
56 camera_metadata_ro_entry availableStreamConfigs = staticInfo.find(configuration);
57 for (size_t i = 0; i < availableStreamConfigs.count; i += 4) {
58 int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
59 int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
60 int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
61 int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
62 StreamConfiguration sc = {format, width, height, isInput};
63 (*scm)[format].push_back(sc);
64 }
65 }
66
getStreamConfigurations(const CameraMetadata & staticInfo,bool maxRes,std::unordered_map<int,std::vector<StreamConfiguration>> * scm)67 void StreamConfiguration::getStreamConfigurations(
68 const CameraMetadata &staticInfo, bool maxRes,
69 std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
70 int32_t scalerKey =
71 SessionConfigurationUtils::getAppropriateModeTag(
72 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxRes);
73
74 int32_t depthKey =
75 SessionConfigurationUtils::getAppropriateModeTag(
76 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxRes);
77
78 int32_t dynamicDepthKey =
79 SessionConfigurationUtils::getAppropriateModeTag(
80 ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, maxRes);
81
82 int32_t heicKey =
83 SessionConfigurationUtils::getAppropriateModeTag(
84 ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxRes);
85
86 getStreamConfigurations(staticInfo, scalerKey, scm);
87 getStreamConfigurations(staticInfo, depthKey, scm);
88 getStreamConfigurations(staticInfo, dynamicDepthKey, scm);
89 getStreamConfigurations(staticInfo, heicKey, scm);
90 }
91
92 namespace SessionConfigurationUtils {
93
// Media performance class declared by the device, read once from the
// read-only ODM build property at static-initialization time (0 if unset).
int32_t PERF_CLASS_LEVEL =
        property_get_int32("ro.odm.build.media_performance_class", 0);

// True when the device declares performance class S or newer.
bool IS_PERF_CLASS = (PERF_CLASS_LEVEL >= SDK_VERSION_S);
98
getMaxJpegResolution(const CameraMetadata & metadata,bool ultraHighResolution)99 camera3::Size getMaxJpegResolution(const CameraMetadata &metadata,
100 bool ultraHighResolution) {
101 int32_t maxJpegWidth = 0, maxJpegHeight = 0;
102 const int STREAM_CONFIGURATION_SIZE = 4;
103 const int STREAM_FORMAT_OFFSET = 0;
104 const int STREAM_WIDTH_OFFSET = 1;
105 const int STREAM_HEIGHT_OFFSET = 2;
106 const int STREAM_IS_INPUT_OFFSET = 3;
107
108 int32_t scalerSizesTag = ultraHighResolution ?
109 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION :
110 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
111 camera_metadata_ro_entry_t availableStreamConfigs =
112 metadata.find(scalerSizesTag);
113 if (availableStreamConfigs.count == 0 ||
114 availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
115 return camera3::Size(0, 0);
116 }
117
118 // Get max jpeg size (area-wise).
119 for (size_t i= 0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
120 int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
121 int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
122 int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
123 int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
124 if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
125 && format == HAL_PIXEL_FORMAT_BLOB &&
126 (width * height > maxJpegWidth * maxJpegHeight)) {
127 maxJpegWidth = width;
128 maxJpegHeight = height;
129 }
130 }
131
132 return camera3::Size(maxJpegWidth, maxJpegHeight);
133 }
134
getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,camera3::Size defaultMaxJpegSize,size_t defaultMaxJpegBufferSize)135 size_t getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
136 camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize) {
137 return ((float)(uhrMaxJpegSize.width * uhrMaxJpegSize.height)) /
138 (defaultMaxJpegSize.width * defaultMaxJpegSize.height) * defaultMaxJpegBufferSize;
139 }
140
141 StreamConfigurationPair
getStreamConfigurationPair(const CameraMetadata & staticInfo)142 getStreamConfigurationPair(const CameraMetadata &staticInfo) {
143 camera3::StreamConfigurationPair streamConfigurationPair;
144 camera3::StreamConfiguration::getStreamConfigurations(staticInfo, false,
145 &streamConfigurationPair.mDefaultStreamConfigurationMap);
146 camera3::StreamConfiguration::getStreamConfigurations(staticInfo, true,
147 &streamConfigurationPair.mMaximumResolutionStreamConfigurationMap);
148 return streamConfigurationPair;
149 }
150
// Returns the squared Euclidean distance between (x0, y0) and (x1, y1).
// The operands are widened to 64 bits BEFORE subtracting: computing
// (x0 - x1) in 32-bit arithmetic overflows (undefined behavior) when the
// inputs have opposite signs and large magnitude.
int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
    int64_t d0 = static_cast<int64_t>(x0) - x1;
    int64_t d1 = static_cast<int64_t>(y0) - y1;
    return d0 * d0 + d1 * d1;
}
156
// Rounds (width, height) to the nearest size the device actually supports for
// the given format/data space, writing the result to *outWidth/*outHeight.
// Returns false (and logs) when the device lists no configuration for the
// format at all; outputs are untouched in that case.
bool roundBufferDimensionNearest(int32_t width, int32_t height,
        int32_t format, android_dataspace dataSpace,
        const CameraMetadata& info, bool maxResolution, /*out*/int32_t* outWidth,
        /*out*/int32_t* outHeight) {
    // Each format class lives under its own metadata tag; resolve the
    // default vs. maximum-resolution variant up front.
    const int32_t depthSizesTag =
            getAppropriateModeTag(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
                    maxResolution);
    const int32_t scalerSizesTag =
            getAppropriateModeTag(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
    const int32_t heicSizesTag =
            getAppropriateModeTag(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
    const int32_t jpegRSizesTag = getAppropriateModeTag(
            ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, maxResolution);

    // Pick the configuration table by data space: Jpeg/R first, then depth,
    // then HEIF; everything else falls back to the scaler table. Order of
    // these checks matters — Jpeg/R must win over the generic fallback.
    bool isJpegRDataSpace = (dataSpace == static_cast<android_dataspace_t>(
                ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R));
    camera_metadata_ro_entry streamConfigs =
            (isJpegRDataSpace) ? info.find(jpegRSizesTag) :
            (dataSpace == HAL_DATASPACE_DEPTH) ? info.find(depthSizesTag) :
            (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
            info.find(heicSizesTag) :
            info.find(scalerSizesTag);

    // -1 doubles as the "no candidate yet" sentinel; see the short-circuit in
    // the loop below which prevents euclidDistSquare from seeing it.
    int32_t bestWidth = -1;
    int32_t bestHeight = -1;

    // Iterate through listed stream configurations and find the one with the smallest euclidean
    // distance from the given dimensions for the given format.
    // Entries are (format, width, height, isInput) tuples, hence the stride of 4.
    for (size_t i = 0; i < streamConfigs.count; i += 4) {
        int32_t fmt = streamConfigs.data.i32[i];
        int32_t w = streamConfigs.data.i32[i + 1];
        int32_t h = streamConfigs.data.i32[i + 2];

        // Ignore input/output type for now
        if (fmt == format) {
            if (w == width && h == height) {
                // Exact match: stop searching immediately.
                bestWidth = width;
                bestHeight = height;
                break;
            } else if (w <= ROUNDING_WIDTH_CAP && (bestWidth == -1 ||
                    SessionConfigurationUtils::euclidDistSquare(w, h, width, height) <
                    SessionConfigurationUtils::euclidDistSquare(bestWidth, bestHeight, width,
                            height))) {
                // Candidates wider than ROUNDING_WIDTH_CAP are never selected.
                bestWidth = w;
                bestHeight = h;
            }
        }
    }

    if (bestWidth == -1) {
        // Return false if no configurations for this format were listed
        ALOGE("%s: No configurations for format %d width %d, height %d, maxResolution ? %s",
                __FUNCTION__, format, width, height, maxResolution ? "true" : "false");
        return false;
    }

    // Set the outputs to the closet width/height
    if (outWidth != NULL) {
        *outWidth = bestWidth;
    }
    if (outHeight != NULL) {
        *outHeight = bestHeight;
    }

    // Return true if at least one configuration for this format was listed
    return true;
}
224
225 //check if format is 10-bit compatible
is10bitCompatibleFormat(int32_t format,android_dataspace_t dataSpace)226 bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace) {
227 switch(format) {
228 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
229 case HAL_PIXEL_FORMAT_YCBCR_P010:
230 return true;
231 case HAL_PIXEL_FORMAT_BLOB:
232 if (dataSpace == static_cast<android_dataspace_t>(
233 ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
234 return true;
235 }
236
237 return false;
238 default:
239 return false;
240 }
241 }
242
isDynamicRangeProfileSupported(int64_t dynamicRangeProfile,const CameraMetadata & staticInfo)243 bool isDynamicRangeProfileSupported(int64_t dynamicRangeProfile, const CameraMetadata& staticInfo) {
244 if (dynamicRangeProfile == ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
245 // Supported by default
246 return true;
247 }
248
249 camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
250 bool is10bitDynamicRangeSupported = false;
251 for (size_t i = 0; i < entry.count; ++i) {
252 uint8_t capability = entry.data.u8[i];
253 if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) {
254 is10bitDynamicRangeSupported = true;
255 break;
256 }
257 }
258
259 if (!is10bitDynamicRangeSupported) {
260 return false;
261 }
262
263 switch (dynamicRangeProfile) {
264 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
265 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
266 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
267 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
268 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
269 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
270 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
271 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM:
272 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO:
273 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF:
274 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO:
275 entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP);
276 for (size_t i = 0; i < entry.count; i += 3) {
277 if (dynamicRangeProfile == entry.data.i64[i]) {
278 return true;
279 }
280 }
281
282 return false;
283 default:
284 return false;
285 }
286
287 return false;
288 }
289
290 //check if format is 10-bit compatible
is10bitDynamicRangeProfile(int64_t dynamicRangeProfile)291 bool is10bitDynamicRangeProfile(int64_t dynamicRangeProfile) {
292 switch (dynamicRangeProfile) {
293 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
294 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
295 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
296 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
297 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
298 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
299 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
300 return true;
301 default:
302 return false;
303 }
304 }
305
deviceReportsColorSpaces(const CameraMetadata & staticInfo)306 bool deviceReportsColorSpaces(const CameraMetadata& staticInfo) {
307 camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
308 for (size_t i = 0; i < entry.count; ++i) {
309 uint8_t capability = entry.data.u8[i];
310 if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES) {
311 return true;
312 }
313 }
314
315 return false;
316 }
317
isColorSpaceSupported(int32_t colorSpace,int32_t format,android_dataspace dataSpace,int64_t dynamicRangeProfile,const CameraMetadata & staticInfo)318 bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
319 int64_t dynamicRangeProfile, const CameraMetadata& staticInfo) {
320 int64_t colorSpace64 = colorSpace;
321 int64_t format64 = format;
322
323 // Translate HAL format + data space to public format
324 if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_V0_JFIF) {
325 format64 = 0x100; // JPEG
326 } else if (format == HAL_PIXEL_FORMAT_BLOB
327 && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) {
328 format64 = 0x48454946; // HEIC
329 } else if (format == HAL_PIXEL_FORMAT_BLOB
330 && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_DYNAMIC_DEPTH)) {
331 format64 = 0x69656963; // DEPTH_JPEG
332 } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_DEPTH) {
333 return false; // DEPTH_POINT_CLOUD, not applicable
334 } else if (format == HAL_PIXEL_FORMAT_Y16 && dataSpace == HAL_DATASPACE_DEPTH) {
335 return false; // DEPTH16, not applicable
336 } else if (format == HAL_PIXEL_FORMAT_RAW16 && dataSpace == HAL_DATASPACE_DEPTH) {
337 return false; // RAW_DEPTH, not applicable
338 } else if (format == HAL_PIXEL_FORMAT_RAW10 && dataSpace == HAL_DATASPACE_DEPTH) {
339 return false; // RAW_DEPTH10, not applicable
340 } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace ==
341 static_cast<android_dataspace>(
342 ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
343 format64 = static_cast<int64_t>(PublicFormat::JPEG_R);
344 }
345
346 camera_metadata_ro_entry_t entry =
347 staticInfo.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
348 for (size_t i = 0; i < entry.count; i += 3) {
349 bool isFormatCompatible = (format64 == entry.data.i64[i + 1]);
350 bool isDynamicProfileCompatible =
351 (dynamicRangeProfile & entry.data.i64[i + 2]) != 0;
352
353 if (colorSpace64 == entry.data.i64[i]
354 && isFormatCompatible
355 && isDynamicProfileCompatible) {
356 return true;
357 }
358 }
359
360 ALOGE("Color space %d, image format %" PRId64 ", and dynamic range 0x%" PRIx64
361 " combination not found", colorSpace, format64, dynamicRangeProfile);
362 return false;
363 }
364
isPublicFormat(int32_t format)365 bool isPublicFormat(int32_t format)
366 {
367 switch(format) {
368 case HAL_PIXEL_FORMAT_RGBA_8888:
369 case HAL_PIXEL_FORMAT_RGBX_8888:
370 case HAL_PIXEL_FORMAT_RGB_888:
371 case HAL_PIXEL_FORMAT_RGB_565:
372 case HAL_PIXEL_FORMAT_BGRA_8888:
373 case HAL_PIXEL_FORMAT_YV12:
374 case HAL_PIXEL_FORMAT_Y8:
375 case HAL_PIXEL_FORMAT_Y16:
376 case HAL_PIXEL_FORMAT_RAW16:
377 case HAL_PIXEL_FORMAT_RAW10:
378 case HAL_PIXEL_FORMAT_RAW12:
379 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
380 case HAL_PIXEL_FORMAT_BLOB:
381 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
382 case HAL_PIXEL_FORMAT_YCbCr_420_888:
383 case HAL_PIXEL_FORMAT_YCbCr_422_SP:
384 case HAL_PIXEL_FORMAT_YCrCb_420_SP:
385 case HAL_PIXEL_FORMAT_YCbCr_422_I:
386 return true;
387 default:
388 return false;
389 }
390 }
391
// Maps a requested color space to the HAL data space to configure the stream
// with. Returns false (and logs) for color spaces this code cannot map.
bool dataSpaceFromColorSpace(android_dataspace *dataSpace, int32_t colorSpace) {
    switch (colorSpace) {
        case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB:
            *dataSpace = HAL_DATASPACE_V0_SRGB;
            return true;
        case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3:
            *dataSpace = HAL_DATASPACE_DISPLAY_P3;
            return true;
        case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG:
            // NOTE(review): written through an int32 alias rather than a plain
            // assignment — presumably because HAL_DATASPACE_BT2020_HLG is not a
            // declared android_dataspace enumerator; confirm before changing.
            *(reinterpret_cast<int32_t*>(dataSpace)) = HAL_DATASPACE_BT2020_HLG;
            return true;
        default:
            ALOGE("%s: Unsupported color space %d", __FUNCTION__, colorSpace);
            return false;
    }
}
408
isStreamUseCaseSupported(int64_t streamUseCase,const CameraMetadata & deviceInfo)409 bool isStreamUseCaseSupported(int64_t streamUseCase,
410 const CameraMetadata &deviceInfo) {
411 camera_metadata_ro_entry_t availableStreamUseCases =
412 deviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES);
413
414 if (availableStreamUseCases.count == 0 &&
415 streamUseCase == ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
416 return true;
417 }
418 // Allow vendor stream use case unconditionally.
419 if (streamUseCase >= ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START) {
420 return true;
421 }
422
423 for (size_t i = 0; i < availableStreamUseCases.count; i++) {
424 if (availableStreamUseCases.data.i64[i] == streamUseCase) {
425 return true;
426 }
427 }
428 return false;
429 }
430
// Validates an app-supplied IGraphicBufferProducer and wraps it in a Surface,
// querying and checking its properties (size, format, data space, usage)
// against the camera's static metadata and the requested stream parameters.
// On first use (isStreamInfoValid == false) the queried values are recorded
// into streamInfo; on subsequent calls the queried values must match the
// recorded ones. Returns an error binder::Status on any validation failure.
binder::Status createSurfaceFromGbp(
        OutputStreamInfo& streamInfo, bool isStreamInfoValid,
        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
        const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
        const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
        int64_t streamUseCase, int timestampBase, int mirrorMode,
        int32_t colorSpace, bool respectSurfaceSize) {
    // bufferProducer must be non-null
    if (gbp == nullptr) {
        std::string msg = fmt::sprintf("Camera %s: Surface is NULL", logicalCameraId.c_str());
        ALOGW("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    // HACK b/10949105
    // Query consumer usage bits to set async operation mode for
    // GLConsumer using controlledByApp parameter.
    bool useAsync = false;
    uint64_t consumerUsage = 0;
    status_t err;
    if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
        std::string msg = fmt::sprintf("Camera %s: Failed to query Surface consumer usage: %s (%d)",
                logicalCameraId.c_str(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
    }
    if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
        ALOGW("%s: Camera %s with consumer usage flag: %" PRIu64 ": Forcing asynchronous mode for"
                "stream", __FUNCTION__, logicalCameraId.c_str(), consumerUsage);
        useAsync = true;
    }

    // A "flexible" consumer (SW-readable / texture / composer, and neither a
    // video encoder nor RenderScript) may have its dimensions rounded below.
    uint64_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
            GRALLOC_USAGE_RENDERSCRIPT;
    uint64_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
            GraphicBuffer::USAGE_HW_TEXTURE |
            GraphicBuffer::USAGE_HW_COMPOSER;
    bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
            (consumerUsage & allowedFlags) != 0;

    surface = new Surface(gbp, useAsync);
    ANativeWindow *anw = surface.get();

    // Query the surface's negotiated properties through the ANativeWindow API.
    int width, height, format;
    android_dataspace dataSpace;
    if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
        std::string msg = fmt::sprintf("Camera %s: Failed to query Surface width: %s (%d)",
                logicalCameraId.c_str(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
    }
    if ((err = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
        std::string msg = fmt::sprintf("Camera %s: Failed to query Surface height: %s (%d)",
                logicalCameraId.c_str(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
    }
    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
        std::string msg = fmt::sprintf("Camera %s: Failed to query Surface format: %s (%d)",
                logicalCameraId.c_str(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
    }
    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
            reinterpret_cast<int*>(&dataSpace))) != OK) {
        std::string msg = fmt::sprintf("Camera %s: Failed to query Surface dataspace: %s (%d)",
                logicalCameraId.c_str(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
    }

    // A requested color space (non-BLOB formats only) overrides the surface's
    // default data space.
    if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
            format != HAL_PIXEL_FORMAT_BLOB) {
        if (!dataSpaceFromColorSpace(&dataSpace, colorSpace)) {
            std::string msg = fmt::sprintf("Camera %s: color space %d not supported, failed to "
                    "convert to data space", logicalCameraId.c_str(), colorSpace);
            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
        }
    }

    // FIXME: remove this override since the default format should be
    // IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
    if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
            ((consumerUsage & GRALLOC_USAGE_HW_MASK) &&
             ((consumerUsage & GRALLOC_USAGE_SW_READ_MASK) == 0))) {
        ALOGW("%s: Camera %s: Overriding format %#x to IMPLEMENTATION_DEFINED",
                __FUNCTION__, logicalCameraId.c_str(), format);
        format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    }
    // Validate (and possibly override) the requested sensor pixel modes for
    // this stream against the physical camera's metadata.
    std::unordered_set<int32_t> overriddenSensorPixelModes;
    if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed, format, width, height,
            physicalCameraMetadata, &overriddenSensorPixelModes) != OK) {
        std::string msg = fmt::sprintf("Camera %s: sensor pixel modes for stream with "
                "format %#x are not valid",logicalCameraId.c_str(), format);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    bool foundInMaxRes = false;
    if (overriddenSensorPixelModes.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
            overriddenSensorPixelModes.end()) {
        // we can use the default stream configuration map
        foundInMaxRes = true;
    }
    // Round dimensions to the nearest dimensions available for this format.
    // Only do the rounding if the client doesn't ask to respect the surface
    // size.
    if (flexibleConsumer && isPublicFormat(format) && !respectSurfaceSize &&
            !SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
            format, dataSpace, physicalCameraMetadata, foundInMaxRes, /*out*/&width,
            /*out*/&height)) {
        std::string msg = fmt::sprintf("Camera %s: No supported stream configurations with "
                "format %#x defined, failed to create output stream",
                logicalCameraId.c_str(), format);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    // Validate the remaining stream parameters against device capabilities.
    if (!SessionConfigurationUtils::isDynamicRangeProfileSupported(dynamicRangeProfile,
            physicalCameraMetadata)) {
        std::string msg = fmt::sprintf("Camera %s: Dynamic range profile 0x%" PRIx64
                " not supported,failed to create output stream", logicalCameraId.c_str(),
                dynamicRangeProfile);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    if (SessionConfigurationUtils::is10bitDynamicRangeProfile(dynamicRangeProfile) &&
            !SessionConfigurationUtils::is10bitCompatibleFormat(format, dataSpace)) {
        std::string msg = fmt::sprintf("Camera %s: No 10-bit supported stream configurations with "
                "format %#x defined and profile %" PRIx64 ", failed to create output stream",
                logicalCameraId.c_str(), format, dynamicRangeProfile);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
            SessionConfigurationUtils::deviceReportsColorSpaces(physicalCameraMetadata) &&
            !SessionConfigurationUtils::isColorSpaceSupported(colorSpace, format, dataSpace,
            dynamicRangeProfile, physicalCameraMetadata)) {
        std::string msg = fmt::sprintf("Camera %s: Color space %d not supported, failed to "
                "create output stream (pixel format %d dynamic range profile %" PRId64 ")",
                logicalCameraId.c_str(), colorSpace, format, dynamicRangeProfile);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
            physicalCameraMetadata)) {
        std::string msg = fmt::sprintf("Camera %s: stream use case %" PRId64 " not supported,"
                " failed to create output stream", logicalCameraId.c_str(), streamUseCase);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    if (timestampBase < OutputConfiguration::TIMESTAMP_BASE_DEFAULT ||
            timestampBase > OutputConfiguration::TIMESTAMP_BASE_MAX) {
        std::string msg = fmt::sprintf("Camera %s: invalid timestamp base %d",
                logicalCameraId.c_str(), timestampBase);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    if (mirrorMode < OutputConfiguration::MIRROR_MODE_AUTO ||
            mirrorMode > OutputConfiguration::MIRROR_MODE_V) {
        std::string msg = fmt::sprintf("Camera %s: invalid mirroring mode %d",
                logicalCameraId.c_str(), mirrorMode);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }

    // First stream for this configuration: record the validated properties.
    if (!isStreamInfoValid) {
        streamInfo.width = width;
        streamInfo.height = height;
        streamInfo.format = format;
        streamInfo.dataSpace = dataSpace;
        streamInfo.consumerUsage = consumerUsage;
        streamInfo.sensorPixelModesUsed = overriddenSensorPixelModes;
        streamInfo.dynamicRangeProfile = dynamicRangeProfile;
        streamInfo.streamUseCase = streamUseCase;
        streamInfo.timestampBase = timestampBase;
        streamInfo.mirrorMode = mirrorMode;
        streamInfo.colorSpace = colorSpace;
        return binder::Status::ok();
    }
    // Subsequent surfaces must match the previously recorded stream info.
    if (width != streamInfo.width) {
        std::string msg = fmt::sprintf("Camera %s:Surface width doesn't match: %d vs %d",
                logicalCameraId.c_str(), width, streamInfo.width);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    if (height != streamInfo.height) {
        std::string msg = fmt::sprintf("Camera %s:Surface height doesn't match: %d vs %d",
                logicalCameraId.c_str(), height, streamInfo.height);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    if (format != streamInfo.format) {
        std::string msg = fmt::sprintf("Camera %s:Surface format doesn't match: %d vs %d",
                logicalCameraId.c_str(), format, streamInfo.format);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    // Data space / usage checks are skipped for IMPLEMENTATION_DEFINED, whose
    // effective layout is negotiated by gralloc rather than the client.
    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        if (dataSpace != streamInfo.dataSpace) {
            std::string msg = fmt::sprintf("Camera %s:Surface dataSpace doesn't match: %d vs %d",
                    logicalCameraId.c_str(), static_cast<int>(dataSpace), static_cast<int>(streamInfo.dataSpace));
            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
        }
        //At the native side, there isn't a way to check whether 2 surfaces come from the same
        //surface class type. Use usage flag to approximate the comparison.
        if (consumerUsage != streamInfo.consumerUsage) {
            std::string msg = fmt::sprintf(
                    "Camera %s:Surface usage flag doesn't match %" PRIu64 " vs %" PRIu64 "",
                    logicalCameraId.c_str(), consumerUsage, streamInfo.consumerUsage);
            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
        }
    }
    return binder::Status::ok();
}
646
mapStreamInfo(const OutputStreamInfo & streamInfo,camera3::camera_stream_rotation_t rotation,const std::string & physicalId,int32_t groupId,aidl::android::hardware::camera::device::Stream * stream)647 void mapStreamInfo(const OutputStreamInfo &streamInfo,
648 camera3::camera_stream_rotation_t rotation, const std::string &physicalId,
649 int32_t groupId, aidl::android::hardware::camera::device::Stream *stream /*out*/) {
650 if (stream == nullptr) {
651 return;
652 }
653
654 stream->streamType = aidl::android::hardware::camera::device::StreamType::OUTPUT;
655 stream->width = streamInfo.width;
656 stream->height = streamInfo.height;
657 stream->format = AidlCamera3Device::mapToAidlPixelFormat(streamInfo.format);
658 auto u = streamInfo.consumerUsage;
659 camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
660 stream->usage = AidlCamera3Device::mapToAidlConsumerUsage(u);
661 stream->dataSpace = AidlCamera3Device::mapToAidlDataspace(streamInfo.dataSpace);
662 stream->colorSpace = streamInfo.colorSpace;
663 stream->rotation = AidlCamera3Device::mapToAidlStreamRotation(rotation);
664 stream->id = -1; // Invalid stream id
665 stream->physicalCameraId = physicalId;
666 stream->bufferSize = 0;
667 stream->groupId = groupId;
668 stream->sensorPixelModesUsed.resize(streamInfo.sensorPixelModesUsed.size());
669 size_t idx = 0;
670 using SensorPixelMode = aidl::android::hardware::camera::metadata::SensorPixelMode;
671 for (auto mode : streamInfo.sensorPixelModesUsed) {
672 stream->sensorPixelModesUsed[idx++] =
673 static_cast<SensorPixelMode>(mode);
674 }
675 using DynamicRangeProfile =
676 aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
677 stream->dynamicRangeProfile = static_cast<DynamicRangeProfile>(streamInfo.dynamicRangeProfile);
678 using StreamUseCases =
679 aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases;
680 stream->useCase = static_cast<StreamUseCases>(streamInfo.streamUseCase);
681 }
682
// Translates one output stream — plus any internal streams required when it is
// a composite stream (depth, HEIC, or Jpeg/R) — into AIDL Stream entries
// inside 'streamConfiguration'.
//
// @param streamInfo output stream parameters to translate.
// @param isCompositeJpegRDisabled when true, Jpeg/R outputs are mapped as
//        plain streams instead of composite ones.
// @param deviceInfo static metadata used to derive composite internal streams.
// @param rotation stream rotation recorded on each mapped stream.
// @param streamIdx in/out index into streamConfiguration.streams; advanced
//        once for every stream written here.
// @param physicalId physical camera id owning the stream (empty for logical).
// @param groupId multi-resolution stream group id, or -1 when ungrouped.
// @param logicalCameraId logical camera id, used only for error messages.
// @param streamConfiguration destination AIDL stream configuration (out).
// @param earlyExit set to true when a composite stream reports no internal
//        streams, i.e. the stream combination is not supported (out).
binder::Status mapStream(const OutputStreamInfo& streamInfo, bool isCompositeJpegRDisabled,
        const CameraMetadata& deviceInfo, camera_stream_rotation_t rotation,
        size_t* streamIdx/*out*/, const std::string &physicalId, int32_t groupId,
        const std::string& logicalCameraId,
        aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration /*out*/,
        bool *earlyExit /*out*/) {
    bool isDepthCompositeStream =
            camera3::DepthCompositeStream::isDepthCompositeStreamInfo(streamInfo);
    bool isHeicCompositeStream =
            camera3::HeicCompositeStream::isHeicCompositeStreamInfo(streamInfo);
    bool isJpegRCompositeStream =
            camera3::JpegRCompositeStream::isJpegRCompositeStreamInfo(streamInfo) &&
            !isCompositeJpegRDisabled;
    if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
        // We need to take in to account that composite streams can have
        // additional internal camera streams.
        std::vector<OutputStreamInfo> compositeStreams;
        status_t ret;
        if (isDepthCompositeStream) {
            // TODO: Take care of composite streams.
            ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
                    deviceInfo, &compositeStreams);
        } else if (isHeicCompositeStream) {
            ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
                    deviceInfo, &compositeStreams);
        } else {
            ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
                    deviceInfo, &compositeStreams);
        }

        if (ret != OK) {
            std::string msg = fmt::sprintf(
                    "Camera %s: Failed adding composite streams: %s (%d)",
                    logicalCameraId.c_str(), strerror(-ret), ret);
            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
        }

        if (compositeStreams.size() == 0) {
            // No internal streams means composite stream not
            // supported.
            *earlyExit = true;
            return binder::Status::ok();
        } else if (compositeStreams.size() > 1) {
            // One slot is already reserved for this output; grow the stream
            // list by the number of extra internal streams.
            size_t streamCount = streamConfiguration.streams.size() + compositeStreams.size() - 1;
            streamConfiguration.streams.resize(streamCount);
        }

        // Emit one AIDL stream per internal stream, advancing the shared index.
        for (const auto& compositeStream : compositeStreams) {
            mapStreamInfo(compositeStream, rotation,
                    physicalId, groupId,
                    &streamConfiguration.streams[(*streamIdx)++]);
        }
    } else {
        // Regular (non-composite) output maps one-to-one.
        mapStreamInfo(streamInfo, rotation,
                physicalId, groupId, &streamConfiguration.streams[(*streamIdx)++]);
    }

    return binder::Status::ok();
}
743
// Converts an app-facing SessionConfiguration into the AIDL HAL stream
// combination used for configure_streams / isStreamCombinationSupported.
//
// @param sessionConfiguration client session configuration (input + outputs).
// @param logicalCameraId id of the logical camera being configured.
// @param deviceInfo static metadata of the logical camera.
// @param isCompositeJpegRDisabled disables Jpeg/R composite stream handling.
// @param getMetadata callback returning static metadata for a (physical)
//        camera id, honoring the perf-class override.
// @param physicalCameraIds physical ids backing the logical camera, used to
//        validate per-output physical camera targets.
// @param streamConfiguration resulting AIDL stream configuration (out).
// @param overrideForPerfClass whether perf-class metadata overrides apply.
// @param vendorTagId vendor tag provider id stamped on session parameters.
// @param checkSessionParams when true, filtered session parameters are copied
//        into streamConfiguration.sessionParams.
// @param additionalKeys extra tags to preserve when filtering session params.
// @param earlyExit set to true when the combination can be rejected without
//        querying the HAL (e.g. unsupported composite stream) (out).
binder::Status
convertToHALStreamCombination(
        const SessionConfiguration& sessionConfiguration,
        const std::string &logicalCameraId, const CameraMetadata &deviceInfo,
        bool isCompositeJpegRDisabled,
        metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
        aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
        bool overrideForPerfClass, metadata_vendor_id_t vendorTagId,
        bool checkSessionParams, const std::vector<int32_t>& additionalKeys,
        bool *earlyExit) {
    using SensorPixelMode = aidl::android::hardware::camera::metadata::SensorPixelMode;
    auto operatingMode = sessionConfiguration.getOperatingMode();
    binder::Status res = checkOperatingMode(operatingMode, deviceInfo,
            logicalCameraId);
    if (!res.isOk()) {
        return res;
    }

    if (earlyExit == nullptr) {
        std::string msg("earlyExit nullptr");
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
    }
    *earlyExit = false;
    // Map the framework operating mode onto the AIDL operation mode.
    auto ret = AidlCamera3Device::mapToAidlStreamConfigurationMode(
            static_cast<camera_stream_configuration_mode_t> (operatingMode),
            /*out*/ &streamConfiguration.operationMode);
    if (ret != OK) {
        std::string msg = fmt::sprintf(
                "Camera %s: Failed mapping operating mode %d requested: %s (%d)",
                logicalCameraId.c_str(), operatingMode, strerror(-ret), ret);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                msg.c_str());
    }

    // An input stream is configured only when all of its dimensions/format are
    // set; it occupies one extra slot at the front of the stream list.
    bool isInputValid = (sessionConfiguration.getInputWidth() > 0) &&
            (sessionConfiguration.getInputHeight() > 0) &&
            (sessionConfiguration.getInputFormat() > 0);
    auto outputConfigs = sessionConfiguration.getOutputConfigurations();
    size_t streamCount = outputConfigs.size();
    streamCount = isInputValid ? streamCount + 1 : streamCount;
    streamConfiguration.streams.resize(streamCount);
    size_t streamIdx = 0;
    if (isInputValid) {
        // Input streams always run in the default sensor pixel mode.
        std::vector<SensorPixelMode> defaultSensorPixelModes;
        defaultSensorPixelModes.resize(1);
        defaultSensorPixelModes[0] =
                static_cast<SensorPixelMode>(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
        aidl::android::hardware::camera::device::Stream stream;
        stream.id = 0;
        stream.streamType = aidl::android::hardware::camera::device::StreamType::INPUT;
        stream.width = static_cast<uint32_t> (sessionConfiguration.getInputWidth());
        stream.height = static_cast<uint32_t> (sessionConfiguration.getInputHeight());
        stream.format =
                AidlCamera3Device::AidlCamera3Device::mapToAidlPixelFormat(
                        sessionConfiguration.getInputFormat());
        stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0);
        stream.dataSpace =
                static_cast<aidl::android::hardware::graphics::common::Dataspace>(
                        HAL_DATASPACE_UNKNOWN);
        stream.rotation = aidl::android::hardware::camera::device::StreamRotation::ROTATION_0;
        stream.bufferSize = 0;
        stream.groupId = -1;
        stream.sensorPixelModesUsed = defaultSensorPixelModes;
        using DynamicRangeProfile =
                aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
        stream.dynamicRangeProfile =
                DynamicRangeProfile::ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
        streamConfiguration.streams[streamIdx++] = stream;
        streamConfiguration.multiResolutionInputImage =
                sessionConfiguration.inputIsMultiResolution();
    }

    for (const auto &it : outputConfigs) {
        const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
                it.getGraphicBufferProducers();
        bool deferredConsumer = it.isDeferred();
        bool isConfigurationComplete = it.isComplete();
        const std::string &physicalCameraId = it.getPhysicalCameraId();

        int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
        int32_t colorSpace = it.getColorSpace();
        std::vector<int32_t> sensorPixelModesUsed = it.getSensorPixelModesUsed();
        // Validate against the physical camera's metadata when the output
        // explicitly targets one, otherwise against the logical camera's.
        const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId,
                overrideForPerfClass);
        const CameraMetadata &metadataChosen =
                physicalCameraId.size() > 0 ? physicalDeviceInfo : deviceInfo;

        size_t numBufferProducers = bufferProducers.size();
        bool isStreamInfoValid = false;
        int32_t groupId = it.isMultiResolution() ? it.getSurfaceSetID() : -1;
        OutputStreamInfo streamInfo;

        res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType(),
                isConfigurationComplete);
        if (!res.isOk()) {
            return res;
        }
        res = checkPhysicalCameraId(physicalCameraIds, physicalCameraId,
                logicalCameraId);
        if (!res.isOk()) {
            return res;
        }

        int64_t streamUseCase = it.getStreamUseCase();
        int timestampBase = it.getTimestampBase();
        int mirrorMode = it.getMirrorMode();
        // If the configuration is a deferred consumer, or a not yet completed
        // configuration with no buffer producers attached: derive the stream
        // parameters from the OutputConfiguration itself instead of a Surface.
        if (deferredConsumer || (!isConfigurationComplete && numBufferProducers == 0)) {
            streamInfo.width = it.getWidth();
            streamInfo.height = it.getHeight();
            auto surfaceType = it.getSurfaceType();
            // Usage/format/dataspace are implied by the declared surface type.
            switch (surfaceType) {
                case OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE:
                    streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
                    streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
                    streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
                    break;
                case OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW:
                    streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE
                            | GraphicBuffer::USAGE_HW_COMPOSER;
                    streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
                    streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
                    break;
                case OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER:
                case OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC:
                    streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_VIDEO_ENCODER;
                    streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
                    streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
                    break;
                case OutputConfiguration::SURFACE_TYPE_IMAGE_READER:
                    streamInfo.consumerUsage = it.getUsage();
                    streamInfo.format = it.getFormat();
                    streamInfo.dataSpace = (android_dataspace)it.getDataspace();
                    break;
                default:
                    return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                            "Invalid surface type.");
            }
            streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
            if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
                    streamInfo.format, streamInfo.width,
                    streamInfo.height, metadataChosen,
                    &streamInfo.sensorPixelModesUsed) != OK) {
                ALOGE("%s: Deferred surface sensor pixel modes not valid",
                        __FUNCTION__);
                return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                        "Deferred surface sensor pixel modes not valid");
            }
            streamInfo.streamUseCase = streamUseCase;
            auto status = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
                    camera3::CAMERA_STREAM_ROTATION_0, &streamIdx, physicalCameraId, groupId,
                    logicalCameraId, streamConfiguration, earlyExit);
            if (*earlyExit || !status.isOk()) {
                return status;
            }

            isStreamInfoValid = true;

            // Fully deferred output — no surfaces to process below.
            if (numBufferProducers == 0) {
                continue;
            }
        }

        for (auto& bufferProducer : bufferProducers) {
            sp<Surface> surface;
            // Validates the producer and (when streamInfo is not yet valid)
            // fills streamInfo from the surface's properties.
            res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
                    logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
                    streamUseCase, timestampBase, mirrorMode, colorSpace,
                    /*respectSurfaceSize*/true);

            if (!res.isOk())
                return res;

            if (!isStreamInfoValid) {
                // First surface of a shared stream determines the HAL stream.
                auto status = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
                        static_cast<camera_stream_rotation_t> (it.getRotation()), &streamIdx,
                        physicalCameraId, groupId, logicalCameraId, streamConfiguration, earlyExit);
                if (*earlyExit || !status.isOk()) {
                    return status;
                }
                isStreamInfoValid = true;
            }
        }
    }

    if (checkSessionParams) {
        // Filter the client's session parameters down to the device's
        // advertised session keys before handing them to the HAL.
        const CameraMetadata &deviceInfo = getMetadata(logicalCameraId,
                /*overrideForPerfClass*/false);
        CameraMetadata filteredParams;

        filterParameters(sessionConfiguration.getSessionParameters(), deviceInfo,
                additionalKeys, vendorTagId, filteredParams);

        camera_metadata_t* metadata = const_cast<camera_metadata_t*>(filteredParams.getAndLock());
        uint8_t *metadataP = reinterpret_cast<uint8_t*>(metadata);
        streamConfiguration.sessionParams.metadata.assign(metadataP,
                metadataP + get_camera_metadata_size(metadata));
    }

    return binder::Status::ok();
}
948
checkPhysicalCameraId(const std::vector<std::string> & physicalCameraIds,const std::string & physicalCameraId,const std::string & logicalCameraId)949 binder::Status checkPhysicalCameraId(
950 const std::vector<std::string> &physicalCameraIds, const std::string &physicalCameraId,
951 const std::string &logicalCameraId) {
952 if (physicalCameraId.size() == 0) {
953 return binder::Status::ok();
954 }
955 if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(),
956 physicalCameraId) == physicalCameraIds.end()) {
957 std::string msg = fmt::sprintf("Camera %s: Camera doesn't support physicalCameraId %s.",
958 logicalCameraId.c_str(), physicalCameraId.c_str());
959 ALOGE("%s: %s", __FUNCTION__, msg.c_str());
960 return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
961 }
962 return binder::Status::ok();
963 }
964
checkSurfaceType(size_t numBufferProducers,bool deferredConsumer,int surfaceType,bool isConfigurationComplete)965 binder::Status checkSurfaceType(size_t numBufferProducers,
966 bool deferredConsumer, int surfaceType, bool isConfigurationComplete) {
967 if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
968 ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
969 __FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
970 return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
971 } else if ((numBufferProducers == 0) && (!deferredConsumer) && isConfigurationComplete) {
972 ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
973 return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
974 }
975
976 if (deferredConsumer) {
977 bool validSurfaceType = (
978 (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
979 (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
980 if (!validSurfaceType) {
981 std::string msg = fmt::sprintf("Deferred target surface has invalid "
982 "surfaceType = %d.", surfaceType);
983 ALOGE("%s: %s", __FUNCTION__, msg.c_str());
984 return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
985 }
986 } else if (!isConfigurationComplete && numBufferProducers == 0) {
987 bool validSurfaceType = (
988 (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER) ||
989 (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC) ||
990 (surfaceType == OutputConfiguration::SURFACE_TYPE_IMAGE_READER));
991 if (!validSurfaceType) {
992 std::string msg = fmt::sprintf("OutputConfiguration target surface has invalid "
993 "surfaceType = %d.", surfaceType);
994 ALOGE("%s: %s", __FUNCTION__, msg.c_str());
995 return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
996 }
997 }
998
999 return binder::Status::ok();
1000 }
1001
checkOperatingMode(int operatingMode,const CameraMetadata & staticInfo,const std::string & cameraId)1002 binder::Status checkOperatingMode(int operatingMode,
1003 const CameraMetadata &staticInfo, const std::string &cameraId) {
1004 if (operatingMode < 0) {
1005 std::string msg = fmt::sprintf(
1006 "Camera %s: Invalid operating mode %d requested", cameraId.c_str(), operatingMode);
1007 ALOGE("%s: %s", __FUNCTION__, msg.c_str());
1008 return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
1009 msg.c_str());
1010 }
1011
1012 bool isConstrainedHighSpeed = (operatingMode == ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE);
1013 if (isConstrainedHighSpeed) {
1014 camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
1015 bool isConstrainedHighSpeedSupported = false;
1016 for(size_t i = 0; i < entry.count; ++i) {
1017 uint8_t capability = entry.data.u8[i];
1018 if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) {
1019 isConstrainedHighSpeedSupported = true;
1020 break;
1021 }
1022 }
1023 if (!isConstrainedHighSpeedSupported) {
1024 std::string msg = fmt::sprintf(
1025 "Camera %s: Try to create a constrained high speed configuration on a device"
1026 " that doesn't support it.", cameraId.c_str());
1027 ALOGE("%s: %s", __FUNCTION__, msg.c_str());
1028 return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
1029 msg.c_str());
1030 }
1031 }
1032
1033 return binder::Status::ok();
1034 }
1035
inStreamConfigurationMap(int format,int width,int height,const std::unordered_map<int,std::vector<camera3::StreamConfiguration>> & sm)1036 static bool inStreamConfigurationMap(int format, int width, int height,
1037 const std::unordered_map<int, std::vector<camera3::StreamConfiguration>> &sm) {
1038 auto scs = sm.find(format);
1039 if (scs == sm.end()) {
1040 return false;
1041 }
1042 for (auto &sc : scs->second) {
1043 if (sc.width == width && sc.height == height && sc.isInput == 0) {
1044 return true;
1045 }
1046 }
1047 return false;
1048 }
1049
// Deduplicates the client-supplied sensor pixel mode list into a set.
static std::unordered_set<int32_t> convertToSet(const std::vector<int32_t> &sensorPixelModesUsed) {
    std::unordered_set<int32_t> modes;
    modes.insert(sensorPixelModesUsed.begin(), sensorPixelModesUsed.end());
    return modes;
}
1053
// Validates and, where needed, overrides the set of sensor pixel modes a
// stream may run in, based on the device's default and maximum-resolution
// stream configuration maps.
//
// @param sensorPixelModesUsed modes requested by the client (may be empty).
// @param format/width/height stream parameters looked up in the maps.
// @param staticInfo static metadata of the camera device.
// @param overriddenSensorPixelModesUsed resulting validated mode set (out).
// @return OK on success; BAD_VALUE when the requested modes are inconsistent
//         with the device's capabilities or configuration maps.
status_t checkAndOverrideSensorPixelModesUsed(
        const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
        const CameraMetadata &staticInfo,
        std::unordered_set<int32_t> *overriddenSensorPixelModesUsed) {

    const std::unordered_set<int32_t> &sensorPixelModesUsedSet =
            convertToSet(sensorPixelModesUsed);
    if (!supportsUltraHighResolutionCapture(staticInfo)) {
        if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
                sensorPixelModesUsedSet.end()) {
            // invalid value for non ultra high res sensors
            ALOGE("%s ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION used on a device which doesn't "
                    "support ultra high resolution capture", __FUNCTION__);
            return BAD_VALUE;
        }
        // Non-UHR sensors only ever operate in the default pixel mode.
        overriddenSensorPixelModesUsed->clear();
        overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
        return OK;
    }

    StreamConfigurationPair streamConfigurationPair = getStreamConfigurationPair(staticInfo);

    // Check which of the two maps (if either) advertises this exact stream.
    bool isInDefaultStreamConfigurationMap =
            inStreamConfigurationMap(format, width, height,
                    streamConfigurationPair.mDefaultStreamConfigurationMap);

    bool isInMaximumResolutionStreamConfigurationMap =
            inStreamConfigurationMap(format, width, height,
                    streamConfigurationPair.mMaximumResolutionStreamConfigurationMap);

    // Case 1: The client has not changed the sensor mode defaults. In this case, we check if the
    // size + format of the OutputConfiguration is found exclusively in 1.
    // If yes, add that sensorPixelMode to overriddenSensorPixelModes.
    // If no, add 'DEFAULT' and MAXIMUM_RESOLUTION to overriddenSensorPixelModes.
    // This maintains backwards compatibility and also tells the framework the stream
    // might be used in either sensor pixel mode.
    if (sensorPixelModesUsedSet.size() == 0) {
        // Ambiguous case, override to include both cases.
        if (isInDefaultStreamConfigurationMap && isInMaximumResolutionStreamConfigurationMap) {
            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
            return OK;
        }
        if (isInMaximumResolutionStreamConfigurationMap) {
            overriddenSensorPixelModesUsed->insert(
                    ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
        } else {
            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
        }
        return OK;
    }

    // Case2: The app has set sensorPixelModesUsed, we need to verify that they
    // are valid / err out.
    if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_DEFAULT) !=
            sensorPixelModesUsedSet.end() && !isInDefaultStreamConfigurationMap) {
        ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_DEFAULT set by client, but stream f: %d size %d x %d"
                " isn't present in default stream configuration map", __FUNCTION__, format, width,
                height);
        return BAD_VALUE;
    }

    if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
            sensorPixelModesUsedSet.end() && !isInMaximumResolutionStreamConfigurationMap) {
        ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION set by client, but stream f: "
                "%d size %d x %d isn't present in default stream configuration map", __FUNCTION__,
                format, width, height);
        return BAD_VALUE;
    }
    // Client-provided modes all check out; use them as-is.
    *overriddenSensorPixelModesUsed = sensorPixelModesUsedSet;
    return OK;
}
1126
targetPerfClassPrimaryCamera(const std::set<std::string> & perfClassPrimaryCameraIds,const std::string & cameraId,int targetSdkVersion)1127 bool targetPerfClassPrimaryCamera(
1128 const std::set<std::string>& perfClassPrimaryCameraIds, const std::string& cameraId,
1129 int targetSdkVersion) {
1130 bool isPerfClassPrimaryCamera =
1131 perfClassPrimaryCameraIds.find(cameraId) != perfClassPrimaryCameraIds.end();
1132 return targetSdkVersion >= SDK_VERSION_S && isPerfClassPrimaryCamera;
1133 }
1134
mapRequestTemplateFromClient(const std::string & cameraId,int templateId,camera_request_template_t * tempId)1135 binder::Status mapRequestTemplateFromClient(const std::string& cameraId, int templateId,
1136 camera_request_template_t* tempId /*out*/) {
1137 binder::Status ret = binder::Status::ok();
1138
1139 if (tempId == nullptr) {
1140 ret = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
1141 "Camera %s: Invalid template argument", cameraId.c_str());
1142 return ret;
1143 }
1144 switch(templateId) {
1145 case ICameraDeviceUser::TEMPLATE_PREVIEW:
1146 *tempId = camera_request_template_t::CAMERA_TEMPLATE_PREVIEW;
1147 break;
1148 case ICameraDeviceUser::TEMPLATE_RECORD:
1149 *tempId = camera_request_template_t::CAMERA_TEMPLATE_VIDEO_RECORD;
1150 break;
1151 case ICameraDeviceUser::TEMPLATE_STILL_CAPTURE:
1152 *tempId = camera_request_template_t::CAMERA_TEMPLATE_STILL_CAPTURE;
1153 break;
1154 case ICameraDeviceUser::TEMPLATE_VIDEO_SNAPSHOT:
1155 *tempId = camera_request_template_t::CAMERA_TEMPLATE_VIDEO_SNAPSHOT;
1156 break;
1157 case ICameraDeviceUser::TEMPLATE_ZERO_SHUTTER_LAG:
1158 *tempId = camera_request_template_t::CAMERA_TEMPLATE_ZERO_SHUTTER_LAG;
1159 break;
1160 case ICameraDeviceUser::TEMPLATE_MANUAL:
1161 *tempId = camera_request_template_t::CAMERA_TEMPLATE_MANUAL;
1162 break;
1163 default:
1164 ret = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
1165 "Camera %s: Template ID %d is invalid or not supported",
1166 cameraId.c_str(), templateId);
1167 return ret;
1168 }
1169
1170 return ret;
1171 }
1172
mapRequestTemplateToAidl(camera_request_template_t templateId,RequestTemplate * id)1173 status_t mapRequestTemplateToAidl(camera_request_template_t templateId,
1174 RequestTemplate* id /*out*/) {
1175 switch (templateId) {
1176 case CAMERA_TEMPLATE_PREVIEW:
1177 *id = RequestTemplate::PREVIEW;
1178 break;
1179 case CAMERA_TEMPLATE_STILL_CAPTURE:
1180 *id = RequestTemplate::STILL_CAPTURE;
1181 break;
1182 case CAMERA_TEMPLATE_VIDEO_RECORD:
1183 *id = RequestTemplate::VIDEO_RECORD;
1184 break;
1185 case CAMERA_TEMPLATE_VIDEO_SNAPSHOT:
1186 *id = RequestTemplate::VIDEO_SNAPSHOT;
1187 break;
1188 case CAMERA_TEMPLATE_ZERO_SHUTTER_LAG:
1189 *id = RequestTemplate::ZERO_SHUTTER_LAG;
1190 break;
1191 case CAMERA_TEMPLATE_MANUAL:
1192 *id = RequestTemplate::MANUAL;
1193 break;
1194 default:
1195 // Unknown template ID, or this HAL is too old to support it
1196 return BAD_VALUE;
1197 }
1198 return OK;
1199 }
1200
filterParameters(const CameraMetadata & src,const CameraMetadata & deviceInfo,const std::vector<int32_t> & additionalTags,metadata_vendor_id_t vendorTagId,CameraMetadata & dst)1201 void filterParameters(const CameraMetadata& src, const CameraMetadata& deviceInfo,
1202 const std::vector<int32_t>& additionalTags, metadata_vendor_id_t vendorTagId,
1203 CameraMetadata& dst) {
1204 const CameraMetadata params(src);
1205 camera_metadata_ro_entry_t availableSessionKeys = deviceInfo.find(
1206 ANDROID_REQUEST_AVAILABLE_SESSION_KEYS);
1207 CameraMetadata filteredParams(availableSessionKeys.count);
1208 camera_metadata_t *meta = const_cast<camera_metadata_t *>(
1209 filteredParams.getAndLock());
1210 set_camera_metadata_vendor_id(meta, vendorTagId);
1211 filteredParams.unlock(meta);
1212
1213 std::unordered_set<int32_t> filteredTags(availableSessionKeys.data.i32,
1214 availableSessionKeys.data.i32 + availableSessionKeys.count);
1215 filteredTags.insert(additionalTags.begin(), additionalTags.end());
1216 for (int32_t tag : filteredTags) {
1217 camera_metadata_ro_entry entry = params.find(tag);
1218 if (entry.count > 0) {
1219 filteredParams.update(entry);
1220 }
1221 }
1222 dst = std::move(filteredParams);
1223 }
1224
overrideDefaultRequestKeys(CameraMetadata * request)1225 status_t overrideDefaultRequestKeys(CameraMetadata *request) {
1226 // Override the template request with ZoomRatioMapper
1227 status_t res = ZoomRatioMapper::initZoomRatioInTemplate(request);
1228 if (res != OK) {
1229 ALOGE("Failed to update zoom ratio: %s (%d)", strerror(-res), res);
1230 return res;
1231 }
1232
1233 // Fill in JPEG_QUALITY if not available
1234 if (!request->exists(ANDROID_JPEG_QUALITY)) {
1235 static const uint8_t kDefaultJpegQuality = 95;
1236 request->update(ANDROID_JPEG_QUALITY, &kDefaultJpegQuality, 1);
1237 }
1238
1239 // Fill in AUTOFRAMING if not available
1240 if (!request->exists(ANDROID_CONTROL_AUTOFRAMING)) {
1241 static const uint8_t kDefaultAutoframingMode = ANDROID_CONTROL_AUTOFRAMING_OFF;
1242 request->update(ANDROID_CONTROL_AUTOFRAMING, &kDefaultAutoframingMode, 1);
1243 }
1244
1245 return OK;
1246 }
1247
1248 } // namespace SessionConfigurationUtils
1249 } // namespace camera3
1250 } // namespace android
1251