• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2020 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include <cutils/properties.h>
18 
19 #include "SessionConfigurationUtils.h"
20 #include "../api2/DepthCompositeStream.h"
21 #include "../api2/HeicCompositeStream.h"
22 #include "aidl/android/hardware/graphics/common/Dataspace.h"
23 #include "api2/JpegRCompositeStream.h"
24 #include "common/CameraDeviceBase.h"
25 #include "common/HalConversionsTemplated.h"
26 #include "../CameraService.h"
27 #include "device3/aidl/AidlCamera3Device.h"
28 #include "device3/hidl/HidlCamera3Device.h"
29 #include "device3/Camera3OutputStream.h"
30 #include "system/graphics-base-v1.1.h"
31 #include <ui/PublicFormat.h>
32 
33 using android::camera3::OutputStreamInfo;
34 using android::camera3::OutputStreamInfo;
35 using android::hardware::camera2::ICameraDeviceUser;
36 
37 namespace android {
38 namespace camera3 {
39 
getStreamConfigurations(const CameraMetadata & staticInfo,int configuration,std::unordered_map<int,std::vector<StreamConfiguration>> * scm)40 void StreamConfiguration::getStreamConfigurations(
41         const CameraMetadata &staticInfo, int configuration,
42         std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
43     if (scm == nullptr) {
44         ALOGE("%s: StreamConfigurationMap nullptr", __FUNCTION__);
45         return;
46     }
47     const int STREAM_FORMAT_OFFSET = 0;
48     const int STREAM_WIDTH_OFFSET = 1;
49     const int STREAM_HEIGHT_OFFSET = 2;
50     const int STREAM_IS_INPUT_OFFSET = 3;
51 
52     camera_metadata_ro_entry availableStreamConfigs = staticInfo.find(configuration);
53     for (size_t i = 0; i < availableStreamConfigs.count; i += 4) {
54         int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
55         int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
56         int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
57         int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
58         StreamConfiguration sc = {format, width, height, isInput};
59         (*scm)[format].push_back(sc);
60     }
61 }
62 
getStreamConfigurations(const CameraMetadata & staticInfo,bool maxRes,std::unordered_map<int,std::vector<StreamConfiguration>> * scm)63 void StreamConfiguration::getStreamConfigurations(
64         const CameraMetadata &staticInfo, bool maxRes,
65         std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
66     int32_t scalerKey =
67             SessionConfigurationUtils::getAppropriateModeTag(
68                     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxRes);
69 
70     int32_t depthKey =
71             SessionConfigurationUtils::getAppropriateModeTag(
72                     ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxRes);
73 
74     int32_t dynamicDepthKey =
75             SessionConfigurationUtils::getAppropriateModeTag(
76                     ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, maxRes);
77 
78     int32_t heicKey =
79             SessionConfigurationUtils::getAppropriateModeTag(
80                     ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxRes);
81 
82     getStreamConfigurations(staticInfo, scalerKey, scm);
83     getStreamConfigurations(staticInfo, depthKey, scm);
84     getStreamConfigurations(staticInfo, dynamicDepthKey, scm);
85     getStreamConfigurations(staticInfo, heicKey, scm);
86 }
87 
88 namespace SessionConfigurationUtils {
89 
// Media performance class level declared by the ODM through the
// "ro.odm.build.media_performance_class" system property (0 when unset).
int32_t PERF_CLASS_LEVEL =
        property_get_int32("ro.odm.build.media_performance_class", 0);

// True when the device declares media performance class S (API 31) or newer.
bool IS_PERF_CLASS = (PERF_CLASS_LEVEL >= SDK_VERSION_S);
94 
getMaxJpegResolution(const CameraMetadata & metadata,bool ultraHighResolution)95 camera3::Size getMaxJpegResolution(const CameraMetadata &metadata,
96         bool ultraHighResolution) {
97     int32_t maxJpegWidth = 0, maxJpegHeight = 0;
98     const int STREAM_CONFIGURATION_SIZE = 4;
99     const int STREAM_FORMAT_OFFSET = 0;
100     const int STREAM_WIDTH_OFFSET = 1;
101     const int STREAM_HEIGHT_OFFSET = 2;
102     const int STREAM_IS_INPUT_OFFSET = 3;
103 
104     int32_t scalerSizesTag = ultraHighResolution ?
105             ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION :
106                     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
107     camera_metadata_ro_entry_t availableStreamConfigs =
108             metadata.find(scalerSizesTag);
109     if (availableStreamConfigs.count == 0 ||
110             availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
111         return camera3::Size(0, 0);
112     }
113 
114     // Get max jpeg size (area-wise).
115     for (size_t i= 0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
116         int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
117         int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
118         int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
119         int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
120         if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
121                 && format == HAL_PIXEL_FORMAT_BLOB &&
122                 (width * height > maxJpegWidth * maxJpegHeight)) {
123             maxJpegWidth = width;
124             maxJpegHeight = height;
125         }
126     }
127 
128     return camera3::Size(maxJpegWidth, maxJpegHeight);
129 }
130 
getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,camera3::Size defaultMaxJpegSize,size_t defaultMaxJpegBufferSize)131 size_t getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
132         camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize) {
133     return ((float)(uhrMaxJpegSize.width * uhrMaxJpegSize.height)) /
134             (defaultMaxJpegSize.width * defaultMaxJpegSize.height) * defaultMaxJpegBufferSize;
135 }
136 
137 StreamConfigurationPair
getStreamConfigurationPair(const CameraMetadata & staticInfo)138 getStreamConfigurationPair(const CameraMetadata &staticInfo) {
139     camera3::StreamConfigurationPair streamConfigurationPair;
140     camera3::StreamConfiguration::getStreamConfigurations(staticInfo, false,
141             &streamConfigurationPair.mDefaultStreamConfigurationMap);
142     camera3::StreamConfiguration::getStreamConfigurations(staticInfo, true,
143             &streamConfigurationPair.mMaximumResolutionStreamConfigurationMap);
144     return streamConfigurationPair;
145 }
146 
/**
 * Returns the squared Euclidean distance between (x0, y0) and (x1, y1).
 *
 * The deltas are widened to 64 bits BEFORE subtracting; the previous
 * implementation subtracted in 32 bits first, which is signed-overflow UB for
 * coordinates more than INT32_MAX apart.
 */
int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
    int64_t d0 = static_cast<int64_t>(x0) - x1;
    int64_t d1 = static_cast<int64_t>(y0) - y1;
    return d0 * d0 + d1 * d1;
}
152 
/**
 * Rounds (width, height) to the nearest dimensions listed for |format| in the
 * appropriate stream configuration metadata entry.
 *
 * The entry is selected by data space: JPEG/R, depth and HEIF streams use
 * their dedicated tags, everything else uses the scaler tag; |maxResolution|
 * picks the maximum-resolution variant of the chosen tag.
 *
 * Returns false (outputs untouched) when no configuration for |format| is
 * listed; otherwise writes the closest listed size (exact matches win) into
 * *outWidth / *outHeight and returns true. NULL output pointers are skipped.
 */
bool roundBufferDimensionNearest(int32_t width, int32_t height,
        int32_t format, android_dataspace dataSpace,
        const CameraMetadata& info, bool maxResolution, /*out*/int32_t* outWidth,
        /*out*/int32_t* outHeight) {
    const int32_t depthSizesTag =
            getAppropriateModeTag(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
                    maxResolution);
    const int32_t scalerSizesTag =
            getAppropriateModeTag(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
    const int32_t heicSizesTag =
            getAppropriateModeTag(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
    const int32_t jpegRSizesTag = getAppropriateModeTag(
            ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, maxResolution);

    bool isJpegRDataSpace = (dataSpace == static_cast<android_dataspace_t>(
                ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R));
    // Select the metadata entry matching the stream's data space; the order of
    // these checks matters (JPEG_R before depth before HEIF).
    camera_metadata_ro_entry streamConfigs =
            (isJpegRDataSpace) ? info.find(jpegRSizesTag) :
            (dataSpace == HAL_DATASPACE_DEPTH) ? info.find(depthSizesTag) :
            (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
            info.find(heicSizesTag) :
            info.find(scalerSizesTag);

    // -1 doubles as the "no candidate found yet" sentinel.
    int32_t bestWidth = -1;
    int32_t bestHeight = -1;

    // Iterate through listed stream configurations and find the one with the smallest euclidean
    // distance from the given dimensions for the given format.
    for (size_t i = 0; i < streamConfigs.count; i += 4) {
        int32_t fmt = streamConfigs.data.i32[i];
        int32_t w = streamConfigs.data.i32[i + 1];
        int32_t h = streamConfigs.data.i32[i + 2];

        // Ignore input/output type for now
        if (fmt == format) {
            if (w == width && h == height) {
                // Exact match: stop searching.
                bestWidth = width;
                bestHeight = height;
                break;
            } else if (w <= ROUNDING_WIDTH_CAP && (bestWidth == -1 ||
                    SessionConfigurationUtils::euclidDistSquare(w, h, width, height) <
                    SessionConfigurationUtils::euclidDistSquare(bestWidth, bestHeight, width,
                            height))) {
                // Candidates wider than ROUNDING_WIDTH_CAP are never used for
                // rounding; among the rest, keep the closest one seen so far.
                bestWidth = w;
                bestHeight = h;
            }
        }
    }

    if (bestWidth == -1) {
        // Return false if no configurations for this format were listed
        ALOGE("%s: No configurations for format %d width %d, height %d, maxResolution ? %s",
                __FUNCTION__, format, width, height, maxResolution ? "true" : "false");
        return false;
    }

    // Set the outputs to the closest width/height
    if (outWidth != NULL) {
        *outWidth = bestWidth;
    }
    if (outHeight != NULL) {
        *outHeight = bestHeight;
    }

    // Return true if at least one configuration for this format was listed
    return true;
}
220 
221 //check if format is 10-bit compatible
is10bitCompatibleFormat(int32_t format,android_dataspace_t dataSpace)222 bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace) {
223     switch(format) {
224         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
225         case HAL_PIXEL_FORMAT_YCBCR_P010:
226             return true;
227         case HAL_PIXEL_FORMAT_BLOB:
228             if (dataSpace == static_cast<android_dataspace_t>(
229                         ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
230                 return true;
231             }
232 
233             return false;
234         default:
235             return false;
236     }
237 }
238 
isDynamicRangeProfileSupported(int64_t dynamicRangeProfile,const CameraMetadata & staticInfo)239 bool isDynamicRangeProfileSupported(int64_t dynamicRangeProfile, const CameraMetadata& staticInfo) {
240     if (dynamicRangeProfile == ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
241         // Supported by default
242         return true;
243     }
244 
245     camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
246     bool is10bitDynamicRangeSupported = false;
247     for (size_t i = 0; i < entry.count; ++i) {
248         uint8_t capability = entry.data.u8[i];
249         if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) {
250             is10bitDynamicRangeSupported = true;
251             break;
252         }
253     }
254 
255     if (!is10bitDynamicRangeSupported) {
256         return false;
257     }
258 
259     switch (dynamicRangeProfile) {
260         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
261         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
262         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
263         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
264         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
265         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
266         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
267         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM:
268         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO:
269         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF:
270         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO:
271             entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP);
272             for (size_t i = 0; i < entry.count; i += 3) {
273                 if (dynamicRangeProfile == entry.data.i64[i]) {
274                     return true;
275                 }
276             }
277 
278             return false;
279         default:
280             return false;
281     }
282 
283     return false;
284 }
285 
// Check whether the given dynamic range profile is a 10-bit profile
// (excludes STANDARD and the 8-bit Dolby Vision variants).
bool is10bitDynamicRangeProfile(int64_t dynamicRangeProfile) {
    switch (dynamicRangeProfile) {
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
            return true;
        default:
            return false;
    }
}
301 
deviceReportsColorSpaces(const CameraMetadata & staticInfo)302 bool deviceReportsColorSpaces(const CameraMetadata& staticInfo) {
303     camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
304     for (size_t i = 0; i < entry.count; ++i) {
305         uint8_t capability = entry.data.u8[i];
306         if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES) {
307             return true;
308         }
309     }
310 
311     return false;
312 }
313 
/**
 * Returns whether the (colorSpace, format/dataSpace, dynamicRangeProfile)
 * combination appears in the device's color space profiles map.
 *
 * The HAL format + data space pair is first translated to the public image
 * format value used by the map; depth-flavored formats have no public color
 * space semantics and are rejected outright.
 */
bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
        int64_t dynamicRangeProfile, const CameraMetadata& staticInfo) {
    int64_t colorSpace64 = colorSpace;
    int64_t format64 = format;

    // Translate HAL format + data space to public format
    if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_V0_JFIF) {
        format64 = 0x100; // JPEG
    } else if (format == HAL_PIXEL_FORMAT_BLOB
            && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) {
        format64 = 0x48454946; // HEIC
    } else if (format == HAL_PIXEL_FORMAT_BLOB
            && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_DYNAMIC_DEPTH)) {
        format64 = 0x69656963; // DEPTH_JPEG
    } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_DEPTH) {
        return false; // DEPTH_POINT_CLOUD, not applicable
    } else if (format == HAL_PIXEL_FORMAT_Y16 && dataSpace == HAL_DATASPACE_DEPTH) {
        return false; // DEPTH16, not applicable
    } else if (format == HAL_PIXEL_FORMAT_RAW16 && dataSpace == HAL_DATASPACE_DEPTH) {
        return false; // RAW_DEPTH, not applicable
    } else if (format == HAL_PIXEL_FORMAT_RAW10 && dataSpace == HAL_DATASPACE_DEPTH) {
        return false; // RAW_DEPTH10, not applicable
    } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace ==
            static_cast<android_dataspace>(
                ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
        format64 = static_cast<int64_t>(PublicFormat::JPEG_R);
    }

    // The map is a sequence of 3-tuples: (colorSpace, imageFormat, profiles),
    // where the third element is treated as a bitmask of dynamic range
    // profiles (see the bitwise AND below).
    camera_metadata_ro_entry_t entry =
            staticInfo.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
    for (size_t i = 0; i < entry.count; i += 3) {
        bool isFormatCompatible = (format64 == entry.data.i64[i + 1]);
        bool isDynamicProfileCompatible =
                (dynamicRangeProfile & entry.data.i64[i + 2]) != 0;

        if (colorSpace64 == entry.data.i64[i]
                && isFormatCompatible
                && isDynamicProfileCompatible) {
            return true;
        }
    }

    ALOGE("Color space %d, image format %" PRId64 ", and dynamic range 0x%" PRIx64
            " combination not found", colorSpace, format64, dynamicRangeProfile);
    return false;
}
360 
isPublicFormat(int32_t format)361 bool isPublicFormat(int32_t format)
362 {
363     switch(format) {
364         case HAL_PIXEL_FORMAT_RGBA_8888:
365         case HAL_PIXEL_FORMAT_RGBX_8888:
366         case HAL_PIXEL_FORMAT_RGB_888:
367         case HAL_PIXEL_FORMAT_RGB_565:
368         case HAL_PIXEL_FORMAT_BGRA_8888:
369         case HAL_PIXEL_FORMAT_YV12:
370         case HAL_PIXEL_FORMAT_Y8:
371         case HAL_PIXEL_FORMAT_Y16:
372         case HAL_PIXEL_FORMAT_RAW16:
373         case HAL_PIXEL_FORMAT_RAW10:
374         case HAL_PIXEL_FORMAT_RAW12:
375         case HAL_PIXEL_FORMAT_RAW_OPAQUE:
376         case HAL_PIXEL_FORMAT_BLOB:
377         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
378         case HAL_PIXEL_FORMAT_YCbCr_420_888:
379         case HAL_PIXEL_FORMAT_YCbCr_422_SP:
380         case HAL_PIXEL_FORMAT_YCrCb_420_SP:
381         case HAL_PIXEL_FORMAT_YCbCr_422_I:
382             return true;
383         default:
384             return false;
385     }
386 }
387 
// Maps a public color space constant onto the HAL data space used to request
// it. Returns false (and logs, leaving *dataSpace untouched) for unsupported
// color spaces.
bool dataSpaceFromColorSpace(android_dataspace *dataSpace, int32_t colorSpace) {
    switch (colorSpace) {
        case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB:
            *dataSpace = HAL_DATASPACE_V0_SRGB;
            return true;
        case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3:
            *dataSpace = HAL_DATASPACE_DISPLAY_P3;
            return true;
        case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG:
            // Written through an int32_t alias — presumably because
            // HAL_DATASPACE_BT2020_HLG is not an enumerator of
            // android_dataspace, so a direct assignment would not compile.
            // TODO(review): confirm against the graphics-base headers.
            *(reinterpret_cast<int32_t*>(dataSpace)) = HAL_DATASPACE_BT2020_HLG;
            return true;
        default:
            ALOGE("%s: Unsupported color space %d", __FUNCTION__, colorSpace);
            return false;
    }
}
404 
isStreamUseCaseSupported(int64_t streamUseCase,const CameraMetadata & deviceInfo)405 bool isStreamUseCaseSupported(int64_t streamUseCase,
406         const CameraMetadata &deviceInfo) {
407     camera_metadata_ro_entry_t availableStreamUseCases =
408             deviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES);
409 
410     if (availableStreamUseCases.count == 0 &&
411             streamUseCase == ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
412         return true;
413     }
414     // Allow vendor stream use case unconditionally.
415     if (streamUseCase >= ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START) {
416         return true;
417     }
418 
419     for (size_t i = 0; i < availableStreamUseCases.count; i++) {
420         if (availableStreamUseCases.data.i64[i] == streamUseCase) {
421             return true;
422         }
423     }
424     return false;
425 }
426 
/**
 * Validates an app-provided IGraphicBufferProducer and wraps it in a Surface.
 *
 * Queries the producer's width/height/format/dataspace, optionally rounds the
 * dimensions to the nearest supported stream configuration, and checks the
 * requested dynamic range profile, color space, stream use case, timestamp
 * base and mirror mode against the camera's static metadata.
 *
 * @param streamInfo in/out: filled from the surface when |isStreamInfoValid|
 *        is false; otherwise used as the reference the surface must match.
 * @param isStreamInfoValid whether |streamInfo| already describes the stream.
 * @param surface out: the Surface created around |gbp|.
 * @param gbp buffer producer backing the output; must be non-null.
 * @param logicalCameraId camera id, used in log and error messages.
 * @param physicalCameraMetadata static metadata of the (physical) camera.
 * @param sensorPixelModesUsed requested sensor pixel modes (validated and
 *        possibly overridden).
 * @param dynamicRangeProfile requested dynamic range profile.
 * @param streamUseCase requested stream use case.
 * @param timestampBase requested timestamp base.
 * @param mirrorMode requested mirroring mode.
 * @param colorSpace requested color space, or UNSPECIFIED.
 * @return ok(), or ERROR_ILLEGAL_ARGUMENT / ERROR_INVALID_OPERATION status.
 */
binder::Status createSurfaceFromGbp(
        OutputStreamInfo& streamInfo, bool isStreamInfoValid,
        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
        const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
        const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
        int64_t streamUseCase, int timestampBase, int mirrorMode,
        int32_t colorSpace) {
    // bufferProducer must be non-null
    if (gbp == nullptr) {
        String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
        ALOGW("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    // HACK b/10949105
    // Query consumer usage bits to set async operation mode for
    // GLConsumer using controlledByApp parameter.
    bool useAsync = false;
    uint64_t consumerUsage = 0;
    status_t err;
    if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface consumer usage: %s (%d)",
                logicalCameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }
    if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
        ALOGW("%s: Camera %s with consumer usage flag: %" PRIu64 ": Forcing asynchronous mode for"
                "stream", __FUNCTION__, logicalCameraId.string(), consumerUsage);
        useAsync = true;
    }

    // A "flexible" consumer (SW-readable / texture / composer, and neither a
    // video encoder nor RenderScript) allows dimension rounding below.
    uint64_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
                              GRALLOC_USAGE_RENDERSCRIPT;
    uint64_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
                           GraphicBuffer::USAGE_HW_TEXTURE |
                           GraphicBuffer::USAGE_HW_COMPOSER;
    bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
            (consumerUsage & allowedFlags) != 0;

    surface = new Surface(gbp, useAsync);
    ANativeWindow *anw = surface.get();

    // Read back the surface's geometry, pixel format and default dataspace.
    int width, height, format;
    android_dataspace dataSpace;
    if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface width: %s (%d)",
                 logicalCameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }
    if ((err = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface height: %s (%d)",
                logicalCameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }
    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface format: %s (%d)",
                logicalCameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }
    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
            reinterpret_cast<int*>(&dataSpace))) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface dataspace: %s (%d)",
                logicalCameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }

    // For non-BLOB formats, an explicit color space overrides the surface's
    // default dataspace.
    if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
            format != HAL_PIXEL_FORMAT_BLOB) {
        if (!dataSpaceFromColorSpace(&dataSpace, colorSpace)) {
            String8 msg = String8::format("Camera %s: color space %d not supported, failed to "
                    "convert to data space", logicalCameraId.string(), colorSpace);
            ALOGE("%s: %s", __FUNCTION__, msg.string());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
        }
    }

    // FIXME: remove this override since the default format should be
    //       IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
    if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
            ((consumerUsage & GRALLOC_USAGE_HW_MASK) &&
             ((consumerUsage & GRALLOC_USAGE_SW_READ_MASK) == 0))) {
        ALOGW("%s: Camera %s: Overriding format %#x to IMPLEMENTATION_DEFINED",
                __FUNCTION__, logicalCameraId.string(), format);
        format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    }
    // Validate the requested sensor pixel modes for this stream's format and
    // size; the checked/overridden set is what gets stored in streamInfo.
    std::unordered_set<int32_t> overriddenSensorPixelModes;
    if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed, format, width, height,
            physicalCameraMetadata, &overriddenSensorPixelModes) != OK) {
        String8 msg = String8::format("Camera %s: sensor pixel modes for stream with "
                "format %#x are not valid",logicalCameraId.string(), format);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    bool foundInMaxRes = false;
    if (overriddenSensorPixelModes.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
            overriddenSensorPixelModes.end()) {
        // we can use the default stream configuration map
        foundInMaxRes = true;
    }
    // Round dimensions to the nearest dimensions available for this format
    if (flexibleConsumer && isPublicFormat(format) &&
            !SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
            format, dataSpace, physicalCameraMetadata, foundInMaxRes, /*out*/&width,
            /*out*/&height)) {
        String8 msg = String8::format("Camera %s: No supported stream configurations with "
                "format %#x defined, failed to create output stream",
                logicalCameraId.string(), format);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    // Validate dynamic range profile, 10-bit format compatibility, color
    // space, stream use case, timestamp base and mirror mode in turn.
    if (!SessionConfigurationUtils::isDynamicRangeProfileSupported(dynamicRangeProfile,
                physicalCameraMetadata)) {
        String8 msg = String8::format("Camera %s: Dynamic range profile 0x%" PRIx64
                " not supported,failed to create output stream", logicalCameraId.string(),
                dynamicRangeProfile);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (SessionConfigurationUtils::is10bitDynamicRangeProfile(dynamicRangeProfile) &&
            !SessionConfigurationUtils::is10bitCompatibleFormat(format, dataSpace)) {
        String8 msg = String8::format("Camera %s: No 10-bit supported stream configurations with "
                "format %#x defined and profile %" PRIx64 ", failed to create output stream",
                logicalCameraId.string(), format, dynamicRangeProfile);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
            SessionConfigurationUtils::deviceReportsColorSpaces(physicalCameraMetadata) &&
            !SessionConfigurationUtils::isColorSpaceSupported(colorSpace, format, dataSpace,
                    dynamicRangeProfile, physicalCameraMetadata)) {
        String8 msg = String8::format("Camera %s: Color space %d not supported, failed to "
                "create output stream (pixel format %d dynamic range profile %" PRId64 ")",
                logicalCameraId.string(), colorSpace, format, dynamicRangeProfile);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
            physicalCameraMetadata)) {
        String8 msg = String8::format("Camera %s: stream use case %" PRId64 " not supported,"
                " failed to create output stream", logicalCameraId.string(), streamUseCase);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (timestampBase < OutputConfiguration::TIMESTAMP_BASE_DEFAULT ||
            timestampBase > OutputConfiguration::TIMESTAMP_BASE_MAX) {
        String8 msg = String8::format("Camera %s: invalid timestamp base %d",
                logicalCameraId.string(), timestampBase);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (mirrorMode < OutputConfiguration::MIRROR_MODE_AUTO ||
            mirrorMode > OutputConfiguration::MIRROR_MODE_V) {
        String8 msg = String8::format("Camera %s: invalid mirroring mode %d",
                logicalCameraId.string(), mirrorMode);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }

    // First surface for this stream: record its properties in streamInfo.
    if (!isStreamInfoValid) {
        streamInfo.width = width;
        streamInfo.height = height;
        streamInfo.format = format;
        streamInfo.dataSpace = dataSpace;
        streamInfo.consumerUsage = consumerUsage;
        streamInfo.sensorPixelModesUsed = overriddenSensorPixelModes;
        streamInfo.dynamicRangeProfile = dynamicRangeProfile;
        streamInfo.streamUseCase = streamUseCase;
        streamInfo.timestampBase = timestampBase;
        streamInfo.mirrorMode = mirrorMode;
        streamInfo.colorSpace = colorSpace;
        return binder::Status::ok();
    }
    // Additional surface for an existing stream: it must match streamInfo.
    if (width != streamInfo.width) {
        String8 msg = String8::format("Camera %s:Surface width doesn't match: %d vs %d",
                logicalCameraId.string(), width, streamInfo.width);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (height != streamInfo.height) {
        String8 msg = String8::format("Camera %s:Surface height doesn't match: %d vs %d",
                 logicalCameraId.string(), height, streamInfo.height);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (format != streamInfo.format) {
        String8 msg = String8::format("Camera %s:Surface format doesn't match: %d vs %d",
                 logicalCameraId.string(), format, streamInfo.format);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        if (dataSpace != streamInfo.dataSpace) {
            String8 msg = String8::format("Camera %s:Surface dataSpace doesn't match: %d vs %d",
                    logicalCameraId.string(), dataSpace, streamInfo.dataSpace);
            ALOGE("%s: %s", __FUNCTION__, msg.string());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
        }
        //At the native side, there isn't a way to check whether 2 surfaces come from the same
        //surface class type. Use usage flag to approximate the comparison.
        if (consumerUsage != streamInfo.consumerUsage) {
            String8 msg = String8::format(
                    "Camera %s:Surface usage flag doesn't match %" PRIu64 " vs %" PRIu64 "",
                    logicalCameraId.string(), consumerUsage, streamInfo.consumerUsage);
            ALOGE("%s: %s", __FUNCTION__, msg.string());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
        }
    }
    return binder::Status::ok();
}
640 
mapStreamInfo(const OutputStreamInfo & streamInfo,camera3::camera_stream_rotation_t rotation,String8 physicalId,int32_t groupId,aidl::android::hardware::camera::device::Stream * stream)641 void mapStreamInfo(const OutputStreamInfo &streamInfo,
642             camera3::camera_stream_rotation_t rotation, String8 physicalId,
643             int32_t groupId, aidl::android::hardware::camera::device::Stream *stream /*out*/) {
644     if (stream == nullptr) {
645         return;
646     }
647 
648     stream->streamType = aidl::android::hardware::camera::device::StreamType::OUTPUT;
649     stream->width = streamInfo.width;
650     stream->height = streamInfo.height;
651     stream->format = AidlCamera3Device::mapToAidlPixelFormat(streamInfo.format);
652     auto u = streamInfo.consumerUsage;
653     camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
654     stream->usage = AidlCamera3Device::mapToAidlConsumerUsage(u);
655     stream->dataSpace = AidlCamera3Device::mapToAidlDataspace(streamInfo.dataSpace);
656     stream->colorSpace = streamInfo.colorSpace;
657     stream->rotation = AidlCamera3Device::mapToAidlStreamRotation(rotation);
658     stream->id = -1; // Invalid stream id
659     stream->physicalCameraId = std::string(physicalId.string());
660     stream->bufferSize = 0;
661     stream->groupId = groupId;
662     stream->sensorPixelModesUsed.resize(streamInfo.sensorPixelModesUsed.size());
663     size_t idx = 0;
664     using SensorPixelMode = aidl::android::hardware::camera::metadata::SensorPixelMode;
665     for (auto mode : streamInfo.sensorPixelModesUsed) {
666         stream->sensorPixelModesUsed[idx++] =
667                 static_cast<SensorPixelMode>(mode);
668     }
669     using DynamicRangeProfile =
670             aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
671     stream->dynamicRangeProfile = static_cast<DynamicRangeProfile>(streamInfo.dynamicRangeProfile);
672     using StreamUseCases =
673             aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases;
674     stream->useCase = static_cast<StreamUseCases>(streamInfo.streamUseCase);
675 }
676 
// Converts a client-facing SessionConfiguration into the AIDL
// StreamConfiguration consumed by the camera HAL.
//
// - logicalCameraId: camera the session is being created on.
// - deviceInfo: static metadata of the logical camera.
// - getMetadata: callback used to fetch per-physical-camera static metadata.
// - physicalCameraIds: physical ids backing the logical camera (for
//   validation of per-output physical camera requests).
// - streamConfiguration (out): the resulting HAL stream combination.
// - earlyExit (out): set to true when a requested composite stream is not
//   supported, in which case an OK status is returned without a usable
//   configuration.
//
// Returns an error status for invalid operating modes, surface/physical-id
// validation failures, or mapping failures.
binder::Status
convertToHALStreamCombination(
        const SessionConfiguration& sessionConfiguration,
        const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
        bool isCompositeJpegRDisabled,
        metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
        aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
        bool overrideForPerfClass, bool *earlyExit) {
    using SensorPixelMode = aidl::android::hardware::camera::metadata::SensorPixelMode;
    auto operatingMode = sessionConfiguration.getOperatingMode();
    binder::Status res = checkOperatingMode(operatingMode, deviceInfo, logicalCameraId);
    if (!res.isOk()) {
        return res;
    }

    if (earlyExit == nullptr) {
        String8 msg("earlyExit nullptr");
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    *earlyExit = false;
    auto ret = AidlCamera3Device::mapToAidlStreamConfigurationMode(
            static_cast<camera_stream_configuration_mode_t> (operatingMode),
            /*out*/ &streamConfiguration.operationMode);
    if (ret != OK) {
        String8 msg = String8::format(
            "Camera %s: Failed mapping operating mode %d requested: %s (%d)",
            logicalCameraId.string(), operatingMode, strerror(-ret), ret);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                msg.string());
    }

    // An input (reprocessing) stream is present only when all of its
    // dimensions/format are positive; it occupies slot 0 of the stream list.
    bool isInputValid = (sessionConfiguration.getInputWidth() > 0) &&
            (sessionConfiguration.getInputHeight() > 0) &&
            (sessionConfiguration.getInputFormat() > 0);
    auto outputConfigs = sessionConfiguration.getOutputConfigurations();
    size_t streamCount = outputConfigs.size();
    streamCount = isInputValid ? streamCount + 1 : streamCount;
    streamConfiguration.streams.resize(streamCount);
    size_t streamIdx = 0;
    if (isInputValid) {
        std::vector<SensorPixelMode> defaultSensorPixelModes;
        defaultSensorPixelModes.resize(1);
        defaultSensorPixelModes[0] =
                static_cast<SensorPixelMode>(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
        aidl::android::hardware::camera::device::Stream stream;
        stream.id = 0;
        stream.streamType =  aidl::android::hardware::camera::device::StreamType::INPUT;
        stream.width = static_cast<uint32_t> (sessionConfiguration.getInputWidth());
        stream.height =  static_cast<uint32_t> (sessionConfiguration.getInputHeight());
        stream.format =
                AidlCamera3Device::AidlCamera3Device::mapToAidlPixelFormat(
                        sessionConfiguration.getInputFormat());
        stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0);
        stream.dataSpace =
              static_cast<aidl::android::hardware::graphics::common::Dataspace>(
                      HAL_DATASPACE_UNKNOWN);
        stream.rotation = aidl::android::hardware::camera::device::StreamRotation::ROTATION_0;
        stream.bufferSize = 0;
        stream.groupId = -1;
        stream.sensorPixelModesUsed = defaultSensorPixelModes;
        using DynamicRangeProfile =
            aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
        stream.dynamicRangeProfile =
            DynamicRangeProfile::ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
        streamConfiguration.streams[streamIdx++] = stream;
        streamConfiguration.multiResolutionInputImage =
                sessionConfiguration.inputIsMultiResolution();
    }

    for (const auto &it : outputConfigs) {
        const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
            it.getGraphicBufferProducers();
        bool deferredConsumer = it.isDeferred();
        String8 physicalCameraId = String8(it.getPhysicalCameraId());

        int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
        int32_t colorSpace = it.getColorSpace();
        std::vector<int32_t> sensorPixelModesUsed = it.getSensorPixelModesUsed();
        // Validate against the physical camera's metadata when the output
        // targets a specific physical camera; otherwise use the logical one's.
        const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId,
                overrideForPerfClass);
        const CameraMetadata &metadataChosen =
                physicalCameraId.size() > 0 ? physicalDeviceInfo : deviceInfo;

        size_t numBufferProducers = bufferProducers.size();
        bool isStreamInfoValid = false;
        int32_t groupId = it.isMultiResolution() ? it.getSurfaceSetID() : -1;
        OutputStreamInfo streamInfo;

        res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType());
        if (!res.isOk()) {
            return res;
        }
        res = checkPhysicalCameraId(physicalCameraIds, physicalCameraId,
                logicalCameraId);
        if (!res.isOk()) {
            return res;
        }

        int64_t streamUseCase = it.getStreamUseCase();
        int timestampBase = it.getTimestampBase();
        int mirrorMode = it.getMirrorMode();
        // Deferred (surfaceless) outputs: synthesize the stream info from the
        // configuration's declared size/type since no Surface exists yet.
        if (deferredConsumer) {
            streamInfo.width = it.getWidth();
            streamInfo.height = it.getHeight();
            streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
            auto surfaceType = it.getSurfaceType();
            streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
            if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
                streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
            }
            streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
            if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
                    streamInfo.format, streamInfo.width,
                    streamInfo.height, metadataChosen,
                    &streamInfo.sensorPixelModesUsed) != OK) {
                        ALOGE("%s: Deferred surface sensor pixel modes not valid",
                                __FUNCTION__);
                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                                "Deferred surface sensor pixel modes not valid");
            }
            streamInfo.streamUseCase = streamUseCase;
            mapStreamInfo(streamInfo, camera3::CAMERA_STREAM_ROTATION_0, physicalCameraId, groupId,
                    &streamConfiguration.streams[streamIdx++]);
            isStreamInfoValid = true;

            if (numBufferProducers == 0) {
                continue;
            }
        }

        for (auto& bufferProducer : bufferProducers) {
            sp<Surface> surface;
            // Validates the producer against streamInfo; fills streamInfo on
            // the first (not-yet-valid) surface of this output.
            res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
                    logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
                    streamUseCase, timestampBase, mirrorMode, colorSpace);

            if (!res.isOk())
                return res;

            // Only the first surface of an output maps to HAL stream entries;
            // additional shared surfaces reuse the same stream.
            if (!isStreamInfoValid) {
                bool isDepthCompositeStream =
                        camera3::DepthCompositeStream::isDepthCompositeStream(surface);
                bool isHeicCompositeStream =
                        camera3::HeicCompositeStream::isHeicCompositeStream(surface);
                bool isJpegRCompositeStream =
                        camera3::JpegRCompositeStream::isJpegRCompositeStream(surface) &&
                        !isCompositeJpegRDisabled;
                if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
                    // We need to take in to account that composite streams can have
                    // additional internal camera streams.
                    std::vector<OutputStreamInfo> compositeStreams;
                    if (isDepthCompositeStream) {
                      // TODO: Take care of composite streams.
                        ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
                                deviceInfo, &compositeStreams);
                    } else if (isHeicCompositeStream) {
                        ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
                            deviceInfo, &compositeStreams);
                    } else {
                        ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
                            deviceInfo, &compositeStreams);
                    }

                    if (ret != OK) {
                        String8 msg = String8::format(
                                "Camera %s: Failed adding composite streams: %s (%d)",
                                logicalCameraId.string(), strerror(-ret), ret);
                        ALOGE("%s: %s", __FUNCTION__, msg.string());
                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
                    }

                    if (compositeStreams.size() == 0) {
                        // No internal streams means composite stream not
                        // supported.
                        *earlyExit = true;
                        return binder::Status::ok();
                    } else if (compositeStreams.size() > 1) {
                        // A composite output expands into multiple HAL streams;
                        // grow the stream list to make room for the extras.
                        streamCount += compositeStreams.size() - 1;
                        streamConfiguration.streams.resize(streamCount);
                    }

                    for (const auto& compositeStream : compositeStreams) {
                        mapStreamInfo(compositeStream,
                                static_cast<camera_stream_rotation_t> (it.getRotation()),
                                physicalCameraId, groupId,
                                &streamConfiguration.streams[streamIdx++]);
                    }
                } else {
                    mapStreamInfo(streamInfo,
                            static_cast<camera_stream_rotation_t> (it.getRotation()),
                            physicalCameraId, groupId, &streamConfiguration.streams[streamIdx++]);
                }
                isStreamInfoValid = true;
            }
        }
    }
    return binder::Status::ok();
}
878 
checkPhysicalCameraId(const std::vector<std::string> & physicalCameraIds,const String8 & physicalCameraId,const String8 & logicalCameraId)879 binder::Status checkPhysicalCameraId(
880         const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
881         const String8 &logicalCameraId) {
882     if (physicalCameraId.size() == 0) {
883         return binder::Status::ok();
884     }
885     if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(),
886         physicalCameraId.string()) == physicalCameraIds.end()) {
887         String8 msg = String8::format("Camera %s: Camera doesn't support physicalCameraId %s.",
888                 logicalCameraId.string(), physicalCameraId.string());
889         ALOGE("%s: %s", __FUNCTION__, msg.string());
890         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
891     }
892     return binder::Status::ok();
893 }
894 
checkSurfaceType(size_t numBufferProducers,bool deferredConsumer,int surfaceType)895 binder::Status checkSurfaceType(size_t numBufferProducers,
896         bool deferredConsumer, int surfaceType)  {
897     if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
898         ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
899                 __FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
900         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
901     } else if ((numBufferProducers == 0) && (!deferredConsumer)) {
902         ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
903         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
904     }
905 
906     bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
907             (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
908 
909     if (deferredConsumer && !validSurfaceType) {
910         ALOGE("%s: Target surface has invalid surfaceType = %d.", __FUNCTION__, surfaceType);
911         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
912     }
913 
914     return binder::Status::ok();
915 }
916 
checkOperatingMode(int operatingMode,const CameraMetadata & staticInfo,const String8 & cameraId)917 binder::Status checkOperatingMode(int operatingMode,
918         const CameraMetadata &staticInfo, const String8 &cameraId) {
919     if (operatingMode < 0) {
920         String8 msg = String8::format(
921             "Camera %s: Invalid operating mode %d requested", cameraId.string(), operatingMode);
922         ALOGE("%s: %s", __FUNCTION__, msg.string());
923         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
924                 msg.string());
925     }
926 
927     bool isConstrainedHighSpeed = (operatingMode == ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE);
928     if (isConstrainedHighSpeed) {
929         camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
930         bool isConstrainedHighSpeedSupported = false;
931         for(size_t i = 0; i < entry.count; ++i) {
932             uint8_t capability = entry.data.u8[i];
933             if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) {
934                 isConstrainedHighSpeedSupported = true;
935                 break;
936             }
937         }
938         if (!isConstrainedHighSpeedSupported) {
939             String8 msg = String8::format(
940                 "Camera %s: Try to create a constrained high speed configuration on a device"
941                 " that doesn't support it.", cameraId.string());
942             ALOGE("%s: %s", __FUNCTION__, msg.string());
943             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
944                     msg.string());
945         }
946     }
947 
948     return binder::Status::ok();
949 }
950 
inStreamConfigurationMap(int format,int width,int height,const std::unordered_map<int,std::vector<camera3::StreamConfiguration>> & sm)951 static bool inStreamConfigurationMap(int format, int width, int height,
952         const std::unordered_map<int, std::vector<camera3::StreamConfiguration>> &sm) {
953     auto scs = sm.find(format);
954     if (scs == sm.end()) {
955         return false;
956     }
957     for (auto &sc : scs->second) {
958         if (sc.width == width && sc.height == height && sc.isInput == 0) {
959             return true;
960         }
961     }
962     return false;
963 }
964 
// Build an unordered set from the client-provided sensor pixel mode list,
// collapsing any duplicate entries.
static std::unordered_set<int32_t> convertToSet(const std::vector<int32_t> &sensorPixelModesUsed) {
    std::unordered_set<int32_t> modes;
    modes.insert(sensorPixelModesUsed.begin(), sensorPixelModesUsed.end());
    return modes;
}
968 
checkAndOverrideSensorPixelModesUsed(const std::vector<int32_t> & sensorPixelModesUsed,int format,int width,int height,const CameraMetadata & staticInfo,std::unordered_set<int32_t> * overriddenSensorPixelModesUsed)969 status_t checkAndOverrideSensorPixelModesUsed(
970         const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
971         const CameraMetadata &staticInfo,
972         std::unordered_set<int32_t> *overriddenSensorPixelModesUsed) {
973 
974     const std::unordered_set<int32_t> &sensorPixelModesUsedSet =
975             convertToSet(sensorPixelModesUsed);
976     if (!supportsUltraHighResolutionCapture(staticInfo)) {
977         if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
978                 sensorPixelModesUsedSet.end()) {
979             // invalid value for non ultra high res sensors
980             ALOGE("%s ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION used on a device which doesn't "
981                     "support ultra high resolution capture", __FUNCTION__);
982             return BAD_VALUE;
983         }
984         overriddenSensorPixelModesUsed->clear();
985         overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
986         return OK;
987     }
988 
989     StreamConfigurationPair streamConfigurationPair = getStreamConfigurationPair(staticInfo);
990 
991     bool isInDefaultStreamConfigurationMap =
992             inStreamConfigurationMap(format, width, height,
993                     streamConfigurationPair.mDefaultStreamConfigurationMap);
994 
995     bool isInMaximumResolutionStreamConfigurationMap =
996             inStreamConfigurationMap(format, width, height,
997                     streamConfigurationPair.mMaximumResolutionStreamConfigurationMap);
998 
999     // Case 1: The client has not changed the sensor mode defaults. In this case, we check if the
1000     // size + format of the OutputConfiguration is found exclusively in 1.
1001     // If yes, add that sensorPixelMode to overriddenSensorPixelModes.
1002     // If no, add 'DEFAULT' and MAXIMUM_RESOLUTION to overriddenSensorPixelModes.
1003     // This maintains backwards compatibility and also tells the framework the stream
1004     // might be used in either sensor pixel mode.
1005     if (sensorPixelModesUsedSet.size() == 0) {
1006         // Ambiguous case, override to include both cases.
1007         if (isInDefaultStreamConfigurationMap && isInMaximumResolutionStreamConfigurationMap) {
1008             overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
1009             overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
1010             return OK;
1011         }
1012         if (isInMaximumResolutionStreamConfigurationMap) {
1013             overriddenSensorPixelModesUsed->insert(
1014                     ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
1015         } else {
1016             overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
1017         }
1018         return OK;
1019     }
1020 
1021     // Case2: The app has set sensorPixelModesUsed, we need to verify that they
1022     // are valid / err out.
1023     if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_DEFAULT) !=
1024             sensorPixelModesUsedSet.end() && !isInDefaultStreamConfigurationMap) {
1025         ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_DEFAULT set by client, but stream f: %d size %d x %d"
1026                 " isn't present in default stream configuration map", __FUNCTION__, format, width,
1027                 height);
1028         return BAD_VALUE;
1029     }
1030 
1031    if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
1032             sensorPixelModesUsedSet.end() && !isInMaximumResolutionStreamConfigurationMap) {
1033         ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION set by client, but stream f: "
1034                 "%d size %d x %d isn't present in default stream configuration map", __FUNCTION__,
1035                 format, width, height);
1036         return BAD_VALUE;
1037     }
1038     *overriddenSensorPixelModesUsed = sensorPixelModesUsedSet;
1039     return OK;
1040 }
1041 
targetPerfClassPrimaryCamera(const std::set<std::string> & perfClassPrimaryCameraIds,const std::string & cameraId,int targetSdkVersion)1042 bool targetPerfClassPrimaryCamera(
1043         const std::set<std::string>& perfClassPrimaryCameraIds, const std::string& cameraId,
1044         int targetSdkVersion) {
1045     bool isPerfClassPrimaryCamera =
1046             perfClassPrimaryCameraIds.find(cameraId) != perfClassPrimaryCameraIds.end();
1047     return targetSdkVersion >= SDK_VERSION_S && isPerfClassPrimaryCamera;
1048 }
1049 
1050 } // namespace SessionConfigurationUtils
1051 } // namespace camera3
1052 } // namespace android
1053