/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cutils/properties.h>

#include "SessionConfigurationUtils.h"
#include "../api2/DepthCompositeStream.h"
#include "../api2/HeicCompositeStream.h"
#include "common/CameraDeviceBase.h"
#include "common/HalConversionsTemplated.h"
#include "../CameraService.h"
#include "device3/aidl/AidlCamera3Device.h"
#include "device3/hidl/HidlCamera3Device.h"
#include "device3/Camera3OutputStream.h"
#include "system/graphics-base-v1.1.h"

using android::camera3::OutputStreamInfo;
using android::hardware::camera2::ICameraDeviceUser;

namespace android {
namespace camera3 {

void StreamConfiguration::getStreamConfigurations(
        const CameraMetadata &staticInfo, int configuration,
        std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
    if (scm == nullptr) {
        ALOGE("%s: StreamConfigurationMap nullptr", __FUNCTION__);
        return;
    }
    const int STREAM_FORMAT_OFFSET = 0;
    const int STREAM_WIDTH_OFFSET = 1;
    const int STREAM_HEIGHT_OFFSET = 2;
    const int STREAM_IS_INPUT_OFFSET = 3;

    camera_metadata_ro_entry availableStreamConfigs = staticInfo.find(configuration);
    for (size_t i = 0; i < availableStreamConfigs.count; i += 4) {
        int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
        int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
        int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
        int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
        StreamConfiguration sc = {format, width, height, isInput};
        (*scm)[format].push_back(sc);
    }
}

void StreamConfiguration::getStreamConfigurations(
        const CameraMetadata &staticInfo, bool maxRes,
        std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
    int32_t scalerKey =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxRes);

    int32_t depthKey =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxRes);

    int32_t dynamicDepthKey =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, maxRes);

    int32_t heicKey =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxRes);

    getStreamConfigurations(staticInfo, scalerKey, scm);
    getStreamConfigurations(staticInfo, depthKey, scm);
    getStreamConfigurations(staticInfo, dynamicDepthKey, scm);
    getStreamConfigurations(staticInfo, heicKey, scm);
}

namespace SessionConfigurationUtils {

int32_t PERF_CLASS_LEVEL =
        property_get_int32("ro.odm.build.media_performance_class", 0);

bool IS_PERF_CLASS = (PERF_CLASS_LEVEL >= SDK_VERSION_S);

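// Scan the (optionally maximum-resolution) scaler stream configurations and return the
// largest BLOB (JPEG) output size by pixel area, or Size(0, 0) if none is listed.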
camera3::Size getMaxJpegResolution(const CameraMetadata &metadata,
        bool ultraHighResolution) {
    int32_t maxJpegWidth = 0, maxJpegHeight = 0;
    const int STREAM_CONFIGURATION_SIZE = 4;
    const int STREAM_FORMAT_OFFSET = 0;
    const int STREAM_WIDTH_OFFSET = 1;
    const int STREAM_HEIGHT_OFFSET = 2;
    const int STREAM_IS_INPUT_OFFSET = 3;

    int32_t scalerSizesTag = ultraHighResolution ?
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION :
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
    camera_metadata_ro_entry_t availableStreamConfigs =
            metadata.find(scalerSizesTag);
    if (availableStreamConfigs.count == 0 ||
            availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
        return camera3::Size(0, 0);
    }

    // Get max jpeg size (area-wise).
    for (size_t i = 0; i < availableStreamConfigs.count; i += STREAM_CONFIGURATION_SIZE) {
        int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
        int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
        int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
        int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
        if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
                && format == HAL_PIXEL_FORMAT_BLOB &&
                (width * height > maxJpegWidth * maxJpegHeight)) {
            maxJpegWidth = width;
            maxJpegHeight = height;
        }
    }

    return camera3::Size(maxJpegWidth, maxJpegHeight);
}

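// Scale the default max JPEG buffer size by the ratio of the ultra-high-resolution JPEG
// area to the default JPEG area.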
size_t getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
        camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize) {
    return (uhrMaxJpegSize.width * uhrMaxJpegSize.height) /
            (defaultMaxJpegSize.width * defaultMaxJpegSize.height) * defaultMaxJpegBufferSize;
}

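// Build both the default and maximum-resolution stream configuration maps for a device.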
StreamConfigurationPair
getStreamConfigurationPair(const CameraMetadata &staticInfo) {
    camera3::StreamConfigurationPair streamConfigurationPair;
    camera3::StreamConfiguration::getStreamConfigurations(staticInfo, false,
            &streamConfigurationPair.mDefaultStreamConfigurationMap);
    camera3::StreamConfiguration::getStreamConfigurations(staticInfo, true,
            &streamConfigurationPair.mMaximumResolutionStreamConfigurationMap);
    return streamConfigurationPair;
}

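// Squared euclidean distance between two points; used to pick the closest supported size.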
int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
    int64_t d0 = x0 - x1;
    int64_t d1 = y0 - y1;
    return d0 * d0 + d1 * d1;
}

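// Round the requested dimensions to the nearest supported size listed for the given
// format/dataspace, using the appropriate (default or maximum-resolution) configuration
// tag. Returns false if the format has no listed configurations.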
bool roundBufferDimensionNearest(int32_t width, int32_t height,
        int32_t format, android_dataspace dataSpace,
        const CameraMetadata& info, bool maxResolution, /*out*/int32_t* outWidth,
        /*out*/int32_t* outHeight) {
    const int32_t depthSizesTag =
            getAppropriateModeTag(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
                    maxResolution);
    const int32_t scalerSizesTag =
            getAppropriateModeTag(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
    const int32_t heicSizesTag =
            getAppropriateModeTag(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);

    camera_metadata_ro_entry streamConfigs =
            (dataSpace == HAL_DATASPACE_DEPTH) ? info.find(depthSizesTag) :
            (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
            info.find(heicSizesTag) :
            info.find(scalerSizesTag);

    int32_t bestWidth = -1;
    int32_t bestHeight = -1;

    // Iterate through listed stream configurations and find the one with the smallest euclidean
    // distance from the given dimensions for the given format.
    for (size_t i = 0; i < streamConfigs.count; i += 4) {
        int32_t fmt = streamConfigs.data.i32[i];
        int32_t w = streamConfigs.data.i32[i + 1];
        int32_t h = streamConfigs.data.i32[i + 2];

        // Ignore input/output type for now
        if (fmt == format) {
            if (w == width && h == height) {
                bestWidth = width;
                bestHeight = height;
                break;
            } else if (w <= ROUNDING_WIDTH_CAP && (bestWidth == -1 ||
                    SessionConfigurationUtils::euclidDistSquare(w, h, width, height) <
                    SessionConfigurationUtils::euclidDistSquare(bestWidth, bestHeight, width,
                            height))) {
                bestWidth = w;
                bestHeight = h;
            }
        }
    }

    if (bestWidth == -1) {
        // Return false if no configurations for this format were listed
        return false;
    }

    // Set the outputs to the closest width/height
    if (outWidth != NULL) {
        *outWidth = bestWidth;
    }
    if (outHeight != NULL) {
        *outHeight = bestHeight;
    }

    // Return true if at least one configuration for this format was listed
    return true;
}

// Check if format is 10-bit compatible
bool is10bitCompatibleFormat(int32_t format) {
    switch(format) {
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        case HAL_PIXEL_FORMAT_YCBCR_P010:
            return true;
        default:
            return false;
    }
}

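// Check whether the given dynamic range profile is advertised by the device. STANDARD is
// always supported; all other profiles require the 10-bit dynamic range capability and an
// entry in the dynamic range profiles map.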
bool isDynamicRangeProfileSupported(int64_t dynamicRangeProfile, const CameraMetadata& staticInfo) {
    if (dynamicRangeProfile == ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
        // Supported by default
        return true;
    }

    camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
    bool is10bitDynamicRangeSupported = false;
    for (size_t i = 0; i < entry.count; ++i) {
        uint8_t capability = entry.data.u8[i];
        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) {
            is10bitDynamicRangeSupported = true;
            break;
        }
    }

    if (!is10bitDynamicRangeSupported) {
        return false;
    }

    switch (dynamicRangeProfile) {
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO:
            entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP);
            for (size_t i = 0; i < entry.count; i += 3) {
                if (dynamicRangeProfile == entry.data.i64[i]) {
                    return true;
                }
            }

            return false;
        default:
            return false;
    }

    return false;
}

// Check if the dynamic range profile is a 10-bit profile
bool is10bitDynamicRangeProfile(int64_t dynamicRangeProfile) {
    switch (dynamicRangeProfile) {
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
            return true;
        default:
            return false;
    }
}

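// Check whether the HAL pixel format has a public (SDK-visible) equivalent.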
bool isPublicFormat(int32_t format)
{
    switch(format) {
        case HAL_PIXEL_FORMAT_RGBA_8888:
        case HAL_PIXEL_FORMAT_RGBX_8888:
        case HAL_PIXEL_FORMAT_RGB_888:
        case HAL_PIXEL_FORMAT_RGB_565:
        case HAL_PIXEL_FORMAT_BGRA_8888:
        case HAL_PIXEL_FORMAT_YV12:
        case HAL_PIXEL_FORMAT_Y8:
        case HAL_PIXEL_FORMAT_Y16:
        case HAL_PIXEL_FORMAT_RAW16:
        case HAL_PIXEL_FORMAT_RAW10:
        case HAL_PIXEL_FORMAT_RAW12:
        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_BLOB:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
        case HAL_PIXEL_FORMAT_YCbCr_422_I:
            return true;
        default:
            return false;
    }
}

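// Check whether the requested stream use case is listed by the device. DEFAULT is accepted
// when no use cases are advertised, and vendor-defined use cases are always allowed.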
bool isStreamUseCaseSupported(int64_t streamUseCase,
        const CameraMetadata &deviceInfo) {
    camera_metadata_ro_entry_t availableStreamUseCases =
            deviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES);

    if (availableStreamUseCases.count == 0 &&
            streamUseCase == ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
        return true;
    }
    // Allow vendor stream use case unconditionally.
    if (streamUseCase >= ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START) {
        return true;
    }

    for (size_t i = 0; i < availableStreamUseCases.count; i++) {
        if (availableStreamUseCases.data.i64[i] == streamUseCase) {
            return true;
        }
    }
    return false;
}

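// Validate an output Surface (buffer producer) against the camera's capabilities and, on the
// first surface of a stream, fill in the OutputStreamInfo. Subsequent surfaces must match the
// already-populated stream info.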
binder::Status createSurfaceFromGbp(
        OutputStreamInfo& streamInfo, bool isStreamInfoValid,
        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
        const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
        const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
        int64_t streamUseCase, int timestampBase, int mirrorMode) {
    // bufferProducer must be non-null
    if (gbp == nullptr) {
        String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
        ALOGW("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    // HACK b/10949105
    // Query consumer usage bits to set async operation mode for
    // GLConsumer using controlledByApp parameter.
    bool useAsync = false;
    uint64_t consumerUsage = 0;
    status_t err;
    if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface consumer usage: %s (%d)",
                logicalCameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }
    if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
        ALOGW("%s: Camera %s with consumer usage flag: %" PRIu64 ": Forcing asynchronous mode for "
                "stream", __FUNCTION__, logicalCameraId.string(), consumerUsage);
        useAsync = true;
    }

    uint64_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
                              GRALLOC_USAGE_RENDERSCRIPT;
    uint64_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
                           GraphicBuffer::USAGE_HW_TEXTURE |
                           GraphicBuffer::USAGE_HW_COMPOSER;
    bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
            (consumerUsage & allowedFlags) != 0;

    surface = new Surface(gbp, useAsync);
    ANativeWindow *anw = surface.get();

    int width, height, format;
    android_dataspace dataSpace;
    if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface width: %s (%d)",
                logicalCameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }
    if ((err = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface height: %s (%d)",
                logicalCameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }
    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface format: %s (%d)",
                logicalCameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }
    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
            reinterpret_cast<int*>(&dataSpace))) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface dataspace: %s (%d)",
                logicalCameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }

    // FIXME: remove this override since the default format should be
    //       IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
    if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
            ((consumerUsage & GRALLOC_USAGE_HW_MASK) &&
             ((consumerUsage & GRALLOC_USAGE_SW_READ_MASK) == 0))) {
        ALOGW("%s: Camera %s: Overriding format %#x to IMPLEMENTATION_DEFINED",
                __FUNCTION__, logicalCameraId.string(), format);
        format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    }
    std::unordered_set<int32_t> overriddenSensorPixelModes;
    if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed, format, width, height,
            physicalCameraMetadata, flexibleConsumer, &overriddenSensorPixelModes) != OK) {
        String8 msg = String8::format("Camera %s: sensor pixel modes for stream with "
                "format %#x are not valid", logicalCameraId.string(), format);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    bool foundInMaxRes = false;
    if (overriddenSensorPixelModes.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
            overriddenSensorPixelModes.end()) {
        // we need to use the maximum resolution stream configuration map
        foundInMaxRes = true;
    }
    // Round dimensions to the nearest dimensions available for this format
    if (flexibleConsumer && isPublicFormat(format) &&
            !SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
            format, dataSpace, physicalCameraMetadata, foundInMaxRes, /*out*/&width,
            /*out*/&height)) {
        String8 msg = String8::format("Camera %s: No supported stream configurations with "
                "format %#x defined, failed to create output stream",
                logicalCameraId.string(), format);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (!SessionConfigurationUtils::isDynamicRangeProfileSupported(dynamicRangeProfile,
                physicalCameraMetadata)) {
        String8 msg = String8::format("Camera %s: Dynamic range profile 0x%" PRIx64
                " not supported, failed to create output stream", logicalCameraId.string(),
                dynamicRangeProfile);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (SessionConfigurationUtils::is10bitDynamicRangeProfile(dynamicRangeProfile) &&
            !SessionConfigurationUtils::is10bitCompatibleFormat(format)) {
        String8 msg = String8::format("Camera %s: No 10-bit supported stream configurations with "
                "format %#x defined and profile %" PRIx64 ", failed to create output stream",
                logicalCameraId.string(), format, dynamicRangeProfile);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
            physicalCameraMetadata)) {
        String8 msg = String8::format("Camera %s: stream use case %" PRId64 " not supported,"
                " failed to create output stream", logicalCameraId.string(), streamUseCase);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (timestampBase < OutputConfiguration::TIMESTAMP_BASE_DEFAULT ||
            timestampBase > OutputConfiguration::TIMESTAMP_BASE_MAX) {
        String8 msg = String8::format("Camera %s: invalid timestamp base %d",
                logicalCameraId.string(), timestampBase);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (mirrorMode < OutputConfiguration::MIRROR_MODE_AUTO ||
            mirrorMode > OutputConfiguration::MIRROR_MODE_V) {
        String8 msg = String8::format("Camera %s: invalid mirroring mode %d",
                logicalCameraId.string(), mirrorMode);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }

    if (!isStreamInfoValid) {
        streamInfo.width = width;
        streamInfo.height = height;
        streamInfo.format = format;
        streamInfo.dataSpace = dataSpace;
        streamInfo.consumerUsage = consumerUsage;
        streamInfo.sensorPixelModesUsed = overriddenSensorPixelModes;
        streamInfo.dynamicRangeProfile = dynamicRangeProfile;
        streamInfo.streamUseCase = streamUseCase;
        streamInfo.timestampBase = timestampBase;
        streamInfo.mirrorMode = mirrorMode;
        return binder::Status::ok();
    }
    if (width != streamInfo.width) {
        String8 msg = String8::format("Camera %s:Surface width doesn't match: %d vs %d",
                logicalCameraId.string(), width, streamInfo.width);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (height != streamInfo.height) {
        String8 msg = String8::format("Camera %s:Surface height doesn't match: %d vs %d",
                logicalCameraId.string(), height, streamInfo.height);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (format != streamInfo.format) {
        String8 msg = String8::format("Camera %s:Surface format doesn't match: %d vs %d",
                logicalCameraId.string(), format, streamInfo.format);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        if (dataSpace != streamInfo.dataSpace) {
            String8 msg = String8::format("Camera %s:Surface dataSpace doesn't match: %d vs %d",
                    logicalCameraId.string(), dataSpace, streamInfo.dataSpace);
            ALOGE("%s: %s", __FUNCTION__, msg.string());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
        }
        // At the native side, there isn't a way to check whether 2 surfaces come from the same
        // surface class type. Use usage flag to approximate the comparison.
        if (consumerUsage != streamInfo.consumerUsage) {
            String8 msg = String8::format(
                    "Camera %s:Surface usage flag doesn't match %" PRIu64 " vs %" PRIu64 "",
                    logicalCameraId.string(), consumerUsage, streamInfo.consumerUsage);
            ALOGE("%s: %s", __FUNCTION__, msg.string());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
        }
    }
    return binder::Status::ok();
}

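// Translate an OutputStreamInfo into the AIDL HAL Stream structure.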
void mapStreamInfo(const OutputStreamInfo &streamInfo,
            camera3::camera_stream_rotation_t rotation, String8 physicalId,
            int32_t groupId, aidl::android::hardware::camera::device::Stream *stream /*out*/) {
    if (stream == nullptr) {
        return;
    }

    stream->streamType = aidl::android::hardware::camera::device::StreamType::OUTPUT;
    stream->width = streamInfo.width;
    stream->height = streamInfo.height;
    stream->format = AidlCamera3Device::mapToAidlPixelFormat(streamInfo.format);
    auto u = streamInfo.consumerUsage;
    camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
    stream->usage = AidlCamera3Device::mapToAidlConsumerUsage(u);
    stream->dataSpace = AidlCamera3Device::mapToAidlDataspace(streamInfo.dataSpace);
    stream->rotation = AidlCamera3Device::mapToAidlStreamRotation(rotation);
    stream->id = -1; // Invalid stream id
    stream->physicalCameraId = std::string(physicalId.string());
    stream->bufferSize = 0;
    stream->groupId = groupId;
    stream->sensorPixelModesUsed.resize(streamInfo.sensorPixelModesUsed.size());
    size_t idx = 0;
    using SensorPixelMode = aidl::android::hardware::camera::metadata::SensorPixelMode;
    for (auto mode : streamInfo.sensorPixelModesUsed) {
        stream->sensorPixelModesUsed[idx++] =
                static_cast<SensorPixelMode>(mode);
    }
    using DynamicRangeProfile =
            aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
    stream->dynamicRangeProfile = static_cast<DynamicRangeProfile>(streamInfo.dynamicRangeProfile);
    using StreamUseCases =
            aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases;
    stream->useCase = static_cast<StreamUseCases>(streamInfo.streamUseCase);
}

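// Convert a SessionConfiguration from the public API into an AIDL HAL stream combination,
// expanding deferred surfaces and composite (depth/HEIC) streams as needed. Sets *earlyExit
// when the combination can be rejected without consulting the HAL.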
binder::Status
convertToHALStreamCombination(
        const SessionConfiguration& sessionConfiguration,
        const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
        metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
        aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
        bool overrideForPerfClass, bool *earlyExit) {
    using SensorPixelMode = aidl::android::hardware::camera::metadata::SensorPixelMode;
    auto operatingMode = sessionConfiguration.getOperatingMode();
    binder::Status res = checkOperatingMode(operatingMode, deviceInfo, logicalCameraId);
    if (!res.isOk()) {
        return res;
    }

    if (earlyExit == nullptr) {
        String8 msg("earlyExit nullptr");
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    *earlyExit = false;
    auto ret = AidlCamera3Device::mapToAidlStreamConfigurationMode(
            static_cast<camera_stream_configuration_mode_t> (operatingMode),
            /*out*/ &streamConfiguration.operationMode);
    if (ret != OK) {
        String8 msg = String8::format(
            "Camera %s: Failed mapping operating mode %d requested: %s (%d)",
            logicalCameraId.string(), operatingMode, strerror(-ret), ret);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                msg.string());
    }

    bool isInputValid = (sessionConfiguration.getInputWidth() > 0) &&
            (sessionConfiguration.getInputHeight() > 0) &&
            (sessionConfiguration.getInputFormat() > 0);
    auto outputConfigs = sessionConfiguration.getOutputConfigurations();
    size_t streamCount = outputConfigs.size();
    streamCount = isInputValid ? streamCount + 1 : streamCount;
    streamConfiguration.streams.resize(streamCount);
    size_t streamIdx = 0;
    if (isInputValid) {
        std::vector<SensorPixelMode> defaultSensorPixelModes;
        defaultSensorPixelModes.resize(1);
        defaultSensorPixelModes[0] =
                static_cast<SensorPixelMode>(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
        aidl::android::hardware::camera::device::Stream stream;
        stream.id = 0;
        stream.streamType = aidl::android::hardware::camera::device::StreamType::INPUT;
        stream.width = static_cast<uint32_t> (sessionConfiguration.getInputWidth());
        stream.height = static_cast<uint32_t> (sessionConfiguration.getInputHeight());
        stream.format =
                AidlCamera3Device::mapToAidlPixelFormat(
                        sessionConfiguration.getInputFormat());
        stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0);
        stream.dataSpace =
              static_cast<aidl::android::hardware::graphics::common::Dataspace>(
                      HAL_DATASPACE_UNKNOWN);
        stream.rotation = aidl::android::hardware::camera::device::StreamRotation::ROTATION_0;
        stream.bufferSize = 0;
        stream.groupId = -1;
        stream.sensorPixelModesUsed = defaultSensorPixelModes;
        using DynamicRangeProfile =
            aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
        stream.dynamicRangeProfile =
            DynamicRangeProfile::ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
        streamConfiguration.streams[streamIdx++] = stream;
        streamConfiguration.multiResolutionInputImage =
                sessionConfiguration.inputIsMultiResolution();
    }

    for (const auto &it : outputConfigs) {
        const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
            it.getGraphicBufferProducers();
        bool deferredConsumer = it.isDeferred();
        String8 physicalCameraId = String8(it.getPhysicalCameraId());

        int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
        std::vector<int32_t> sensorPixelModesUsed = it.getSensorPixelModesUsed();
        const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId,
                overrideForPerfClass);
        const CameraMetadata &metadataChosen =
                physicalCameraId.size() > 0 ? physicalDeviceInfo : deviceInfo;

        size_t numBufferProducers = bufferProducers.size();
        bool isStreamInfoValid = false;
        int32_t groupId = it.isMultiResolution() ? it.getSurfaceSetID() : -1;
        OutputStreamInfo streamInfo;

        res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType());
        if (!res.isOk()) {
            return res;
        }
        res = checkPhysicalCameraId(physicalCameraIds, physicalCameraId,
                logicalCameraId);
        if (!res.isOk()) {
            return res;
        }

        int64_t streamUseCase = it.getStreamUseCase();
        int timestampBase = it.getTimestampBase();
        int mirrorMode = it.getMirrorMode();
        if (deferredConsumer) {
            streamInfo.width = it.getWidth();
            streamInfo.height = it.getHeight();
            streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
            auto surfaceType = it.getSurfaceType();
            streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
            if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
                streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
            }
            streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
            if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
                    streamInfo.format, streamInfo.width,
                    streamInfo.height, metadataChosen, false /*flexibleConsumer*/,
                    &streamInfo.sensorPixelModesUsed) != OK) {
                        ALOGE("%s: Deferred surface sensor pixel modes not valid",
                                __FUNCTION__);
                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                                "Deferred surface sensor pixel modes not valid");
            }
            streamInfo.streamUseCase = streamUseCase;
            mapStreamInfo(streamInfo, camera3::CAMERA_STREAM_ROTATION_0, physicalCameraId, groupId,
                    &streamConfiguration.streams[streamIdx++]);
            isStreamInfoValid = true;

            if (numBufferProducers == 0) {
                continue;
            }
        }

        for (auto& bufferProducer : bufferProducers) {
            sp<Surface> surface;
            res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
                    logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
                    streamUseCase, timestampBase, mirrorMode);

            if (!res.isOk())
                return res;

            if (!isStreamInfoValid) {
                bool isDepthCompositeStream =
                        camera3::DepthCompositeStream::isDepthCompositeStream(surface);
                bool isHeicCompositeStream =
                        camera3::HeicCompositeStream::isHeicCompositeStream(surface);
                if (isDepthCompositeStream || isHeicCompositeStream) {
                    // We need to take in to account that composite streams can have
                    // additional internal camera streams.
                    std::vector<OutputStreamInfo> compositeStreams;
                    if (isDepthCompositeStream) {
                      // TODO: Take care of composite streams.
                        ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
                                deviceInfo, &compositeStreams);
                    } else {
                        ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
                            deviceInfo, &compositeStreams);
                    }
                    if (ret != OK) {
                        String8 msg = String8::format(
                                "Camera %s: Failed adding composite streams: %s (%d)",
                                logicalCameraId.string(), strerror(-ret), ret);
                        ALOGE("%s: %s", __FUNCTION__, msg.string());
                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
                    }

                    if (compositeStreams.size() == 0) {
                        // No internal streams means composite stream not
                        // supported.
                        *earlyExit = true;
                        return binder::Status::ok();
                    } else if (compositeStreams.size() > 1) {
                        streamCount += compositeStreams.size() - 1;
                        streamConfiguration.streams.resize(streamCount);
                    }

                    for (const auto& compositeStream : compositeStreams) {
                        mapStreamInfo(compositeStream,
                                static_cast<camera_stream_rotation_t> (it.getRotation()),
                                physicalCameraId, groupId,
                                &streamConfiguration.streams[streamIdx++]);
                    }
                } else {
                    mapStreamInfo(streamInfo,
                            static_cast<camera_stream_rotation_t> (it.getRotation()),
                            physicalCameraId, groupId, &streamConfiguration.streams[streamIdx++]);
                }
                isStreamInfoValid = true;
            }
        }
    }
    return binder::Status::ok();
}

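// Verify that a requested physical camera id (if any) belongs to the logical camera.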
binder::Status checkPhysicalCameraId(
        const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
        const String8 &logicalCameraId) {
    if (physicalCameraId.size() == 0) {
        return binder::Status::ok();
    }
    if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(),
        physicalCameraId.string()) == physicalCameraIds.end()) {
        String8 msg = String8::format("Camera %s: Camera doesn't support physicalCameraId %s.",
                logicalCameraId.string(), physicalCameraId.string());
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    return binder::Status::ok();
}

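// Validate the surface count and, for deferred outputs, the requested surface type.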
binder::Status checkSurfaceType(size_t numBufferProducers,
        bool deferredConsumer, int surfaceType) {
    if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
        ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
                __FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
    } else if ((numBufferProducers == 0) && (!deferredConsumer)) {
        ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
    }

    bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
            (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));

    if (deferredConsumer && !validSurfaceType) {
        ALOGE("%s: Target surface has invalid surfaceType = %d.", __FUNCTION__, surfaceType);
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
    }

    return binder::Status::ok();
}

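// Validate the requested operating mode; constrained high speed mode additionally requires
// the corresponding device capability.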
binder::Status checkOperatingMode(int operatingMode,
        const CameraMetadata &staticInfo, const String8 &cameraId) {
    if (operatingMode < 0) {
        String8 msg = String8::format(
            "Camera %s: Invalid operating mode %d requested", cameraId.string(), operatingMode);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                msg.string());
    }

    bool isConstrainedHighSpeed = (operatingMode == ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE);
    if (isConstrainedHighSpeed) {
        camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
        bool isConstrainedHighSpeedSupported = false;
        for (size_t i = 0; i < entry.count; ++i) {
            uint8_t capability = entry.data.u8[i];
            if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) {
                isConstrainedHighSpeedSupported = true;
                break;
            }
        }
        if (!isConstrainedHighSpeedSupported) {
            String8 msg = String8::format(
                "Camera %s: Try to create a constrained high speed configuration on a device"
                " that doesn't support it.", cameraId.string());
            ALOGE("%s: %s", __FUNCTION__, msg.string());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                    msg.string());
        }
    }

    return binder::Status::ok();
}

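// Check whether a format/size combination is listed as an output in a stream configuration map.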
static bool inStreamConfigurationMap(int format, int width, int height,
        const std::unordered_map<int, std::vector<camera3::StreamConfiguration>> &sm) {
    auto scs = sm.find(format);
    if (scs == sm.end()) {
        return false;
    }
    for (auto &sc : scs->second) {
        if (sc.width == width && sc.height == height && sc.isInput == 0) {
            return true;
        }
    }
    return false;
}

static std::unordered_set<int32_t> convertToSet(const std::vector<int32_t> &sensorPixelModesUsed) {
    return std::unordered_set<int32_t>(sensorPixelModesUsed.begin(), sensorPixelModesUsed.end());
}

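// Validate the client-provided sensor pixel modes against the device's stream configuration
// maps, or derive a default set when the client did not specify any.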
status_t checkAndOverrideSensorPixelModesUsed(
        const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
        const CameraMetadata &staticInfo, bool flexibleConsumer,
        std::unordered_set<int32_t> *overriddenSensorPixelModesUsed) {

    const std::unordered_set<int32_t> &sensorPixelModesUsedSet =
            convertToSet(sensorPixelModesUsed);
    if (!isUltraHighResolutionSensor(staticInfo)) {
        if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
                sensorPixelModesUsedSet.end()) {
            // invalid value for non ultra high res sensors
            return BAD_VALUE;
        }
        overriddenSensorPixelModesUsed->clear();
        overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
        return OK;
    }

    StreamConfigurationPair streamConfigurationPair = getStreamConfigurationPair(staticInfo);

    bool isInDefaultStreamConfigurationMap =
            inStreamConfigurationMap(format, width, height,
                    streamConfigurationPair.mDefaultStreamConfigurationMap);

    bool isInMaximumResolutionStreamConfigurationMap =
            inStreamConfigurationMap(format, width, height,
                    streamConfigurationPair.mMaximumResolutionStreamConfigurationMap);

    // Case 1: The client has not changed the sensor mode defaults. In this case, we check if the
    // size + format of the OutputConfiguration is found exclusively in one of the two maps.
    // If yes, add that sensorPixelMode to overriddenSensorPixelModes.
    // If no, add 'DEFAULT' to sensorPixelMode. This maintains backwards
    // compatibility.
    if (sensorPixelModesUsedSet.size() == 0) {
        // Ambiguous case, default to only 'DEFAULT' mode.
        if (isInDefaultStreamConfigurationMap && isInMaximumResolutionStreamConfigurationMap) {
            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
            return OK;
        }
        // We don't allow flexible consumer for max resolution mode.
        if (isInMaximumResolutionStreamConfigurationMap) {
            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
            return OK;
        }
        if (isInDefaultStreamConfigurationMap || (flexibleConsumer && width < ROUNDING_WIDTH_CAP)) {
            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
            return OK;
        }
        return BAD_VALUE;
    }

    // Case 2: The app has set sensorPixelModesUsed; we need to verify that they
    // are valid, or err out.
    if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_DEFAULT) !=
            sensorPixelModesUsedSet.end() && !isInDefaultStreamConfigurationMap) {
        return BAD_VALUE;
    }

    if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
            sensorPixelModesUsedSet.end() && !isInMaximumResolutionStreamConfigurationMap) {
        return BAD_VALUE;
    }
    *overriddenSensorPixelModesUsed = sensorPixelModesUsedSet;
    return OK;
}

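// Return whether the app targets SDK S+ and the camera is a performance-class primary camera.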
bool targetPerfClassPrimaryCamera(
        const std::set<std::string>& perfClassPrimaryCameraIds, const std::string& cameraId,
        int targetSdkVersion) {
    bool isPerfClassPrimaryCamera =
            perfClassPrimaryCameraIds.find(cameraId) != perfClassPrimaryCameraIds.end();
    return targetSdkVersion >= SDK_VERSION_S && isPerfClassPrimaryCamera;
}

} // namespace SessionConfigurationUtils
} // namespace camera3
} // namespace android