/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <cutils/properties.h>

#include "SessionConfigurationUtils.h"
#include "../api2/DepthCompositeStream.h"
#include "../api2/HeicCompositeStream.h"
#include "common/CameraDeviceBase.h"
#include "../CameraService.h"
#include "device3/Camera3Device.h"
#include "device3/Camera3OutputStream.h"

using android::camera3::OutputStreamInfo;
using android::hardware::camera2::ICameraDeviceUser;
using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;

namespace android {
namespace camera3 {

int32_t SessionConfigurationUtils::PERF_CLASS_LEVEL =
        property_get_int32("ro.odm.build.media_performance_class", 0);

bool SessionConfigurationUtils::IS_PERF_CLASS = (PERF_CLASS_LEVEL == SDK_VERSION_S);

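// Returns the largest BLOB (JPEG) output size listed in the scaler stream configurations,
// or Size(0, 0) if the tag is missing or malformed.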
camera3::Size SessionConfigurationUtils::getMaxJpegResolution(const CameraMetadata &metadata,
        bool ultraHighResolution) {
    int32_t maxJpegWidth = 0, maxJpegHeight = 0;
    const int STREAM_CONFIGURATION_SIZE = 4;
    const int STREAM_FORMAT_OFFSET = 0;
    const int STREAM_WIDTH_OFFSET = 1;
    const int STREAM_HEIGHT_OFFSET = 2;
    const int STREAM_IS_INPUT_OFFSET = 3;

    int32_t scalerSizesTag = ultraHighResolution ?
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION :
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
    camera_metadata_ro_entry_t availableStreamConfigs =
            metadata.find(scalerSizesTag);
    if (availableStreamConfigs.count == 0 ||
            availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
        return camera3::Size(0, 0);
    }

    // Get max jpeg size (area-wise).
    for (size_t i = 0; i < availableStreamConfigs.count; i += STREAM_CONFIGURATION_SIZE) {
        int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
        int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
        int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
        int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
        if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
                && format == HAL_PIXEL_FORMAT_BLOB &&
                (width * height > maxJpegWidth * maxJpegHeight)) {
            maxJpegWidth = width;
            maxJpegHeight = height;
        }
    }

    return camera3::Size(maxJpegWidth, maxJpegHeight);
}

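// Scales the default max JPEG buffer size by the ratio of the ultra-high-resolution JPEG
// area to the default JPEG area.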
size_t SessionConfigurationUtils::getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
        camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize) {
    return (uhrMaxJpegSize.width * uhrMaxJpegSize.height) /
            (defaultMaxJpegSize.width * defaultMaxJpegSize.height) * defaultMaxJpegBufferSize;
}

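// Parses the (format, width, height, isInput) tuples under the given stream configuration
// tag and appends them to the per-format map.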
void StreamConfiguration::getStreamConfigurations(
        const CameraMetadata &staticInfo, int configuration,
        std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
    if (scm == nullptr) {
        ALOGE("%s: StreamConfigurationMap nullptr", __FUNCTION__);
        return;
    }
    const int STREAM_CONFIGURATION_SIZE = 4;
    const int STREAM_FORMAT_OFFSET = 0;
    const int STREAM_WIDTH_OFFSET = 1;
    const int STREAM_HEIGHT_OFFSET = 2;
    const int STREAM_IS_INPUT_OFFSET = 3;

    camera_metadata_ro_entry availableStreamConfigs = staticInfo.find(configuration);
    for (size_t i = 0; i < availableStreamConfigs.count; i += STREAM_CONFIGURATION_SIZE) {
        int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
        int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
        int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
        int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
        StreamConfiguration sc = {format, width, height, isInput};
        (*scm)[format].push_back(sc);
    }
}

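// Gathers scaler, depth, dynamic-depth and HEIC stream configurations into one map, using
// the maximum-resolution variants of the tags when maxRes is set.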
void StreamConfiguration::getStreamConfigurations(
        const CameraMetadata &staticInfo, bool maxRes,
        std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
    int32_t scalerKey =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxRes);

    int32_t depthKey =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxRes);

    int32_t dynamicDepthKey =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, maxRes);

    int32_t heicKey =
            SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxRes);

    getStreamConfigurations(staticInfo, scalerKey, scm);
    getStreamConfigurations(staticInfo, depthKey, scm);
    getStreamConfigurations(staticInfo, dynamicDepthKey, scm);
    getStreamConfigurations(staticInfo, heicKey, scm);
}

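// Maps a default-mode metadata tag to its MAXIMUM_RESOLUTION counterpart when maxResolution
// is true; returns -1 if no counterpart exists.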
int32_t SessionConfigurationUtils::getAppropriateModeTag(int32_t defaultTag, bool maxResolution) {
    if (!maxResolution) {
        return defaultTag;
    }
    switch (defaultTag) {
        case ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS:
            return ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
        case ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS:
            return ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
        case ANDROID_SCALER_AVAILABLE_STALL_DURATIONS:
            return ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION;
        case ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS:
            return ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
        case ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS:
            return ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
        case ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS:
            return ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION;
        case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS:
            return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
        case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS:
            return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
        case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS:
            return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION;
        case ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS:
            return ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
        case ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS:
            return ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
        case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS:
            return ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION;
        case ANDROID_SENSOR_OPAQUE_RAW_SIZE:
            return ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION;
        case ANDROID_LENS_INTRINSIC_CALIBRATION:
            return ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION;
        case ANDROID_LENS_DISTORTION:
            return ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION;
        default:
            ALOGE("%s: Tag %d doesn't have a maximum resolution counterpart", __FUNCTION__,
                    defaultTag);
            return -1;
    }
    return -1;
}

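// Reads the width and height (third and fourth elements) of a four-element array tag,
// e.g. an array size entry; returns false if the entry is missing or malformed.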
bool SessionConfigurationUtils::getArrayWidthAndHeight(const CameraMetadata *deviceInfo,
        int32_t arrayTag, int32_t *width, int32_t *height) {
    if (width == nullptr || height == nullptr) {
        ALOGE("%s: width / height nullptr", __FUNCTION__);
        return false;
    }
    camera_metadata_ro_entry_t entry;
    entry = deviceInfo->find(arrayTag);
    if (entry.count != 4) return false;
    *width = entry.data.i32[2];
    *height = entry.data.i32[3];
    return true;
}

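// Builds both the default and the maximum-resolution stream configuration maps from the
// camera's static metadata.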
StreamConfigurationPair
SessionConfigurationUtils::getStreamConfigurationPair(const CameraMetadata &staticInfo) {
    camera3::StreamConfigurationPair streamConfigurationPair;
    camera3::StreamConfiguration::getStreamConfigurations(staticInfo, false,
            &streamConfigurationPair.mDefaultStreamConfigurationMap);
    camera3::StreamConfiguration::getStreamConfigurations(staticInfo, true,
            &streamConfigurationPair.mMaximumResolutionStreamConfigurationMap);
    return streamConfigurationPair;
}

int64_t SessionConfigurationUtils::euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
    int64_t d0 = x0 - x1;
    int64_t d1 = y0 - y1;
    return d0 * d0 + d1 * d1;
}

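// Rounds (width, height) to the closest supported size for the given format and dataspace,
// using squared Euclidean distance; returns false if the format has no listed configurations.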
bool SessionConfigurationUtils::roundBufferDimensionNearest(int32_t width, int32_t height,
        int32_t format, android_dataspace dataSpace,
        const CameraMetadata& info, bool maxResolution, /*out*/int32_t* outWidth,
        /*out*/int32_t* outHeight) {
    const int32_t depthSizesTag =
            getAppropriateModeTag(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
                    maxResolution);
    const int32_t scalerSizesTag =
            getAppropriateModeTag(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
    const int32_t heicSizesTag =
            getAppropriateModeTag(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);

    camera_metadata_ro_entry streamConfigs =
            (dataSpace == HAL_DATASPACE_DEPTH) ? info.find(depthSizesTag) :
            (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
            info.find(heicSizesTag) :
            info.find(scalerSizesTag);

    int32_t bestWidth = -1;
    int32_t bestHeight = -1;

    // Iterate through listed stream configurations and find the one with the smallest euclidean
    // distance from the given dimensions for the given format.
    for (size_t i = 0; i < streamConfigs.count; i += 4) {
        int32_t fmt = streamConfigs.data.i32[i];
        int32_t w = streamConfigs.data.i32[i + 1];
        int32_t h = streamConfigs.data.i32[i + 2];

        // Ignore input/output type for now
        if (fmt == format) {
            if (w == width && h == height) {
                bestWidth = width;
                bestHeight = height;
                break;
            } else if (w <= ROUNDING_WIDTH_CAP && (bestWidth == -1 ||
                    SessionConfigurationUtils::euclidDistSquare(w, h, width, height) <
                    SessionConfigurationUtils::euclidDistSquare(bestWidth, bestHeight, width,
                            height))) {
                bestWidth = w;
                bestHeight = h;
            }
        }
    }

    if (bestWidth == -1) {
        // Return false if no configurations for this format were listed
        return false;
    }

    // Set the outputs to the closest width/height
    if (outWidth != NULL) {
        *outWidth = bestWidth;
    }
    if (outHeight != NULL) {
        *outHeight = bestHeight;
    }

    // Return true if at least one configuration for this format was listed
    return true;
}

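// Returns whether the HAL pixel format is one of the publicly visible formats that
// applications may request.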
bool SessionConfigurationUtils::isPublicFormat(int32_t format)
{
    switch(format) {
        case HAL_PIXEL_FORMAT_RGBA_8888:
        case HAL_PIXEL_FORMAT_RGBX_8888:
        case HAL_PIXEL_FORMAT_RGB_888:
        case HAL_PIXEL_FORMAT_RGB_565:
        case HAL_PIXEL_FORMAT_BGRA_8888:
        case HAL_PIXEL_FORMAT_YV12:
        case HAL_PIXEL_FORMAT_Y8:
        case HAL_PIXEL_FORMAT_Y16:
        case HAL_PIXEL_FORMAT_RAW16:
        case HAL_PIXEL_FORMAT_RAW10:
        case HAL_PIXEL_FORMAT_RAW12:
        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_BLOB:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
        case HAL_PIXEL_FORMAT_YCbCr_422_I:
            return true;
        default:
            return false;
    }
}

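// Wraps an IGraphicBufferProducer in a Surface, queries its size, format and dataspace,
// and either fills in streamInfo or validates the Surface against an existing streamInfo.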
binder::Status SessionConfigurationUtils::createSurfaceFromGbp(
        OutputStreamInfo& streamInfo, bool isStreamInfoValid,
        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
        const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
        const std::vector<int32_t> &sensorPixelModesUsed) {
    // bufferProducer must be non-null
    if (gbp == nullptr) {
        String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
        ALOGW("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    // HACK b/10949105
    // Query consumer usage bits to set async operation mode for
    // GLConsumer using controlledByApp parameter.
    bool useAsync = false;
    uint64_t consumerUsage = 0;
    status_t err;
    if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface consumer usage: %s (%d)",
                logicalCameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }
    if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
        ALOGW("%s: Camera %s with consumer usage flag: %" PRIu64 ": Forcing asynchronous mode for "
                "stream", __FUNCTION__, logicalCameraId.string(), consumerUsage);
        useAsync = true;
    }

    uint64_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
                              GRALLOC_USAGE_RENDERSCRIPT;
    uint64_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
                           GraphicBuffer::USAGE_HW_TEXTURE |
                           GraphicBuffer::USAGE_HW_COMPOSER;
    bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
            (consumerUsage & allowedFlags) != 0;

    surface = new Surface(gbp, useAsync);
    ANativeWindow *anw = surface.get();

    int width, height, format;
    android_dataspace dataSpace;
    if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface width: %s (%d)",
                logicalCameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }
    if ((err = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface height: %s (%d)",
                logicalCameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }
    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface format: %s (%d)",
                logicalCameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }
    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
            reinterpret_cast<int*>(&dataSpace))) != OK) {
        String8 msg = String8::format("Camera %s: Failed to query Surface dataspace: %s (%d)",
                logicalCameraId.string(), strerror(-err), err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
    }

    // FIXME: remove this override since the default format should be
    //        IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
    if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
            ((consumerUsage & GRALLOC_USAGE_HW_MASK) &&
             ((consumerUsage & GRALLOC_USAGE_SW_READ_MASK) == 0))) {
        ALOGW("%s: Camera %s: Overriding format %#x to IMPLEMENTATION_DEFINED",
                __FUNCTION__, logicalCameraId.string(), format);
        format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    }
    std::unordered_set<int32_t> overriddenSensorPixelModes;
    if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed, format, width, height,
            physicalCameraMetadata, flexibleConsumer, &overriddenSensorPixelModes) != OK) {
        String8 msg = String8::format("Camera %s: sensor pixel modes for stream with "
                "format %#x are not valid", logicalCameraId.string(), format);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    bool foundInMaxRes = false;
    if (overriddenSensorPixelModes.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
            overriddenSensorPixelModes.end()) {
        // The stream will be used in maximum-resolution mode, so round against the
        // maximum-resolution stream configuration map.
        foundInMaxRes = true;
    }
    // Round dimensions to the nearest dimensions available for this format
    if (flexibleConsumer && isPublicFormat(format) &&
            !SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
            format, dataSpace, physicalCameraMetadata, foundInMaxRes, /*out*/&width,
            /*out*/&height)) {
        String8 msg = String8::format("Camera %s: No supported stream configurations with "
                "format %#x defined, failed to create output stream",
                logicalCameraId.string(), format);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }

    if (!isStreamInfoValid) {
        streamInfo.width = width;
        streamInfo.height = height;
        streamInfo.format = format;
        streamInfo.dataSpace = dataSpace;
        streamInfo.consumerUsage = consumerUsage;
        streamInfo.sensorPixelModesUsed = overriddenSensorPixelModes;
        return binder::Status::ok();
    }
    if (width != streamInfo.width) {
        String8 msg = String8::format("Camera %s: Surface width doesn't match: %d vs %d",
                logicalCameraId.string(), width, streamInfo.width);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (height != streamInfo.height) {
        String8 msg = String8::format("Camera %s: Surface height doesn't match: %d vs %d",
                logicalCameraId.string(), height, streamInfo.height);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (format != streamInfo.format) {
        String8 msg = String8::format("Camera %s: Surface format doesn't match: %d vs %d",
                logicalCameraId.string(), format, streamInfo.format);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        if (dataSpace != streamInfo.dataSpace) {
            String8 msg = String8::format("Camera %s: Surface dataSpace doesn't match: %d vs %d",
                    logicalCameraId.string(), dataSpace, streamInfo.dataSpace);
            ALOGE("%s: %s", __FUNCTION__, msg.string());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
        }
        // At the native side, there isn't a way to check whether 2 surfaces come from the same
        // surface class type. Use the usage flag to approximate the comparison.
        if (consumerUsage != streamInfo.consumerUsage) {
            String8 msg = String8::format(
                    "Camera %s: Surface usage flag doesn't match %" PRIu64 " vs %" PRIu64 "",
                    logicalCameraId.string(), consumerUsage, streamInfo.consumerUsage);
            ALOGE("%s: %s", __FUNCTION__, msg.string());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
        }
    }
    return binder::Status::ok();
}

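// Translates an OutputStreamInfo plus rotation, physical camera id and group id into a
// HIDL V3_7::Stream for the camera HAL.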
void SessionConfigurationUtils::mapStreamInfo(const OutputStreamInfo &streamInfo,
        camera3::camera_stream_rotation_t rotation, String8 physicalId,
        int32_t groupId, hardware::camera::device::V3_7::Stream *stream /*out*/) {
    if (stream == nullptr) {
        return;
    }

    stream->v3_4.v3_2.streamType = hardware::camera::device::V3_2::StreamType::OUTPUT;
    stream->v3_4.v3_2.width = streamInfo.width;
    stream->v3_4.v3_2.height = streamInfo.height;
    stream->v3_4.v3_2.format = Camera3Device::mapToPixelFormat(streamInfo.format);
    auto u = streamInfo.consumerUsage;
    camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
    stream->v3_4.v3_2.usage = Camera3Device::mapToConsumerUsage(u);
    stream->v3_4.v3_2.dataSpace = Camera3Device::mapToHidlDataspace(streamInfo.dataSpace);
    stream->v3_4.v3_2.rotation = Camera3Device::mapToStreamRotation(rotation);
    stream->v3_4.v3_2.id = -1; // Invalid stream id
    stream->v3_4.physicalCameraId = std::string(physicalId.string());
    stream->v3_4.bufferSize = 0;
    stream->groupId = groupId;
    stream->sensorPixelModesUsed.resize(streamInfo.sensorPixelModesUsed.size());
    size_t idx = 0;
    for (auto mode : streamInfo.sensorPixelModesUsed) {
        stream->sensorPixelModesUsed[idx++] =
                static_cast<CameraMetadataEnumAndroidSensorPixelMode>(mode);
    }
}

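// Verifies that the requested physical camera id, if any, belongs to the logical camera.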
binder::Status SessionConfigurationUtils::checkPhysicalCameraId(
        const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
        const String8 &logicalCameraId) {
    if (physicalCameraId.size() == 0) {
        return binder::Status::ok();
    }
    if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(),
            physicalCameraId.string()) == physicalCameraIds.end()) {
        String8 msg = String8::format("Camera %s: Camera doesn't support physicalCameraId %s.",
                logicalCameraId.string(), physicalCameraId.string());
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    return binder::Status::ok();
}

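// Validates the surface count for a stream and, for deferred consumers, that the declared
// surface type is SurfaceView or SurfaceTexture.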
binder::Status SessionConfigurationUtils::checkSurfaceType(size_t numBufferProducers,
        bool deferredConsumer, int surfaceType) {
    if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
        ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
                __FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
    } else if ((numBufferProducers == 0) && (!deferredConsumer)) {
        ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
    }

    bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
            (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));

    if (deferredConsumer && !validSurfaceType) {
        ALOGE("%s: Target surface has invalid surfaceType = %d.", __FUNCTION__, surfaceType);
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
    }

    return binder::Status::ok();
}

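// Rejects negative operating modes, and constrained high speed mode on devices that don't
// advertise the CONSTRAINED_HIGH_SPEED_VIDEO capability.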
binder::Status SessionConfigurationUtils::checkOperatingMode(int operatingMode,
        const CameraMetadata &staticInfo, const String8 &cameraId) {
    if (operatingMode < 0) {
        String8 msg = String8::format(
            "Camera %s: Invalid operating mode %d requested", cameraId.string(), operatingMode);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                msg.string());
    }

    bool isConstrainedHighSpeed = (operatingMode == ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE);
    if (isConstrainedHighSpeed) {
        camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
        bool isConstrainedHighSpeedSupported = false;
        for (size_t i = 0; i < entry.count; ++i) {
            uint8_t capability = entry.data.u8[i];
            if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) {
                isConstrainedHighSpeedSupported = true;
                break;
            }
        }
        if (!isConstrainedHighSpeedSupported) {
            String8 msg = String8::format(
                "Camera %s: Attempting to create a constrained high speed configuration on a"
                " device that doesn't support it.", cameraId.string());
            ALOGE("%s: %s", __FUNCTION__, msg.string());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                    msg.string());
        }
    }

    return binder::Status::ok();
}

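// Converts an API-level SessionConfiguration into a HIDL V3_7::StreamConfiguration,
// expanding deferred surfaces and depth/HEIC composite streams. Sets *earlyExit to true
// when a composite stream has no internal streams and the query can be skipped.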
binder::Status
SessionConfigurationUtils::convertToHALStreamCombination(
        const SessionConfiguration& sessionConfiguration,
        const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
        metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
        hardware::camera::device::V3_7::StreamConfiguration &streamConfiguration,
        bool overrideForPerfClass, bool *earlyExit) {

    auto operatingMode = sessionConfiguration.getOperatingMode();
    binder::Status res = checkOperatingMode(operatingMode, deviceInfo, logicalCameraId);
    if (!res.isOk()) {
        return res;
    }

    if (earlyExit == nullptr) {
        String8 msg("earlyExit nullptr");
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
    }
    *earlyExit = false;
    auto ret = Camera3Device::mapToStreamConfigurationMode(
            static_cast<camera_stream_configuration_mode_t> (operatingMode),
            /*out*/ &streamConfiguration.operationMode);
    if (ret != OK) {
        String8 msg = String8::format(
            "Camera %s: Failed mapping operating mode %d requested: %s (%d)",
            logicalCameraId.string(), operatingMode, strerror(-ret), ret);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                msg.string());
    }

    bool isInputValid = (sessionConfiguration.getInputWidth() > 0) &&
            (sessionConfiguration.getInputHeight() > 0) &&
            (sessionConfiguration.getInputFormat() > 0);
    auto outputConfigs = sessionConfiguration.getOutputConfigurations();
    size_t streamCount = outputConfigs.size();
    streamCount = isInputValid ? streamCount + 1 : streamCount;
    streamConfiguration.streams.resize(streamCount);
    size_t streamIdx = 0;
    if (isInputValid) {
        hardware::hidl_vec<CameraMetadataEnumAndroidSensorPixelMode> defaultSensorPixelModes;
        defaultSensorPixelModes.resize(1);
        defaultSensorPixelModes[0] =
                static_cast<CameraMetadataEnumAndroidSensorPixelMode>(
                        ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
        streamConfiguration.streams[streamIdx++] = {{{/*streamId*/0,
                hardware::camera::device::V3_2::StreamType::INPUT,
                static_cast<uint32_t> (sessionConfiguration.getInputWidth()),
                static_cast<uint32_t> (sessionConfiguration.getInputHeight()),
                Camera3Device::mapToPixelFormat(sessionConfiguration.getInputFormat()),
                /*usage*/ 0, HAL_DATASPACE_UNKNOWN,
                hardware::camera::device::V3_2::StreamRotation::ROTATION_0},
                /*physicalId*/ nullptr, /*bufferSize*/0}, /*groupId*/-1, defaultSensorPixelModes};
        streamConfiguration.multiResolutionInputImage =
                sessionConfiguration.inputIsMultiResolution();
    }

    for (const auto &it : outputConfigs) {
        const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
            it.getGraphicBufferProducers();
        bool deferredConsumer = it.isDeferred();
        String8 physicalCameraId = String8(it.getPhysicalCameraId());

        std::vector<int32_t> sensorPixelModesUsed = it.getSensorPixelModesUsed();
        const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId,
                overrideForPerfClass);
        const CameraMetadata &metadataChosen =
                physicalCameraId.size() > 0 ? physicalDeviceInfo : deviceInfo;

        size_t numBufferProducers = bufferProducers.size();
        bool isStreamInfoValid = false;
        int32_t groupId = it.isMultiResolution() ? it.getSurfaceSetID() : -1;
        OutputStreamInfo streamInfo;

        res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType());
        if (!res.isOk()) {
            return res;
        }
        res = checkPhysicalCameraId(physicalCameraIds, physicalCameraId,
                logicalCameraId);
        if (!res.isOk()) {
            return res;
        }

        if (deferredConsumer) {
            streamInfo.width = it.getWidth();
            streamInfo.height = it.getHeight();
            streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
            auto surfaceType = it.getSurfaceType();
            streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
            if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
                streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
            }
            if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
                    streamInfo.format, streamInfo.width,
                    streamInfo.height, metadataChosen, false /*flexibleConsumer*/,
                    &streamInfo.sensorPixelModesUsed) != OK) {
                ALOGE("%s: Deferred surface sensor pixel modes not valid",
                        __FUNCTION__);
                return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                        "Deferred surface sensor pixel modes not valid");
            }
            mapStreamInfo(streamInfo, camera3::CAMERA_STREAM_ROTATION_0, physicalCameraId, groupId,
                    &streamConfiguration.streams[streamIdx++]);
            isStreamInfoValid = true;

            if (numBufferProducers == 0) {
                continue;
            }
        }

        for (auto& bufferProducer : bufferProducers) {
            sp<Surface> surface;
            res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
                    logicalCameraId, metadataChosen, sensorPixelModesUsed);

            if (!res.isOk()) {
                return res;
            }

            if (!isStreamInfoValid) {
                bool isDepthCompositeStream =
                        camera3::DepthCompositeStream::isDepthCompositeStream(surface);
                bool isHeicCompositeStream =
                        camera3::HeicCompositeStream::isHeicCompositeStream(surface);
                if (isDepthCompositeStream || isHeicCompositeStream) {
                    // We need to take into account that composite streams can have
                    // additional internal camera streams.
                    std::vector<OutputStreamInfo> compositeStreams;
                    if (isDepthCompositeStream) {
                        // TODO: Take care of composite streams.
                        ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
                                deviceInfo, &compositeStreams);
                    } else {
                        ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
                                deviceInfo, &compositeStreams);
                    }
                    if (ret != OK) {
                        String8 msg = String8::format(
                                "Camera %s: Failed adding composite streams: %s (%d)",
                                logicalCameraId.string(), strerror(-ret), ret);
                        ALOGE("%s: %s", __FUNCTION__, msg.string());
                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
                    }

                    if (compositeStreams.size() == 0) {
                        // No internal streams means composite stream not
                        // supported.
                        *earlyExit = true;
                        return binder::Status::ok();
                    } else if (compositeStreams.size() > 1) {
                        streamCount += compositeStreams.size() - 1;
                        streamConfiguration.streams.resize(streamCount);
                    }

                    for (const auto& compositeStream : compositeStreams) {
                        mapStreamInfo(compositeStream,
                                static_cast<camera_stream_rotation_t> (it.getRotation()),
                                physicalCameraId, groupId,
                                &streamConfiguration.streams[streamIdx++]);
                    }
                } else {
                    mapStreamInfo(streamInfo,
                            static_cast<camera_stream_rotation_t> (it.getRotation()),
                            physicalCameraId, groupId, &streamConfiguration.streams[streamIdx++]);
                }
                isStreamInfoValid = true;
            }
        }
    }
    return binder::Status::ok();
}

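// Returns whether an output configuration with the exact format, width and height exists
// in the given stream configuration map.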
static bool inStreamConfigurationMap(int format, int width, int height,
        const std::unordered_map<int, std::vector<camera3::StreamConfiguration>> &sm) {
    auto scs = sm.find(format);
    if (scs == sm.end()) {
        return false;
    }
    for (auto &sc : scs->second) {
        if (sc.width == width && sc.height == height && sc.isInput == 0) {
            return true;
        }
    }
    return false;
}

static std::unordered_set<int32_t> convertToSet(const std::vector<int32_t> &sensorPixelModesUsed) {
    return std::unordered_set<int32_t>(sensorPixelModesUsed.begin(), sensorPixelModesUsed.end());
}

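// Validates the client-requested sensor pixel modes against the device's stream
// configuration maps, or derives a default set when the client requested none.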
status_t SessionConfigurationUtils::checkAndOverrideSensorPixelModesUsed(
        const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
        const CameraMetadata &staticInfo, bool flexibleConsumer,
        std::unordered_set<int32_t> *overriddenSensorPixelModesUsed) {

    const std::unordered_set<int32_t> &sensorPixelModesUsedSet =
            convertToSet(sensorPixelModesUsed);
    if (!isUltraHighResolutionSensor(staticInfo)) {
        if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
                sensorPixelModesUsedSet.end()) {
            // Invalid value for non ultra high resolution sensors.
            return BAD_VALUE;
        }
        overriddenSensorPixelModesUsed->clear();
        overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
        return OK;
    }

    StreamConfigurationPair streamConfigurationPair = getStreamConfigurationPair(staticInfo);

    bool isInDefaultStreamConfigurationMap =
            inStreamConfigurationMap(format, width, height,
                    streamConfigurationPair.mDefaultStreamConfigurationMap);

    bool isInMaximumResolutionStreamConfigurationMap =
            inStreamConfigurationMap(format, width, height,
                    streamConfigurationPair.mMaximumResolutionStreamConfigurationMap);

    // Case 1: The client has not changed the sensor mode defaults. In this case, check whether
    // the size + format of the OutputConfiguration is found exclusively in one of the two
    // stream configuration maps. If so, add the corresponding sensor pixel mode to
    // overriddenSensorPixelModesUsed; otherwise add 'DEFAULT' to maintain backwards
    // compatibility.
    if (sensorPixelModesUsedSet.size() == 0) {
        // Ambiguous case, default to only 'DEFAULT' mode.
        if (isInDefaultStreamConfigurationMap && isInMaximumResolutionStreamConfigurationMap) {
            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
            return OK;
        }
        // We don't allow flexible consumer for max resolution mode.
        if (isInMaximumResolutionStreamConfigurationMap) {
            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
            return OK;
        }
        if (isInDefaultStreamConfigurationMap || (flexibleConsumer && width < ROUNDING_WIDTH_CAP)) {
            overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
            return OK;
        }
        return BAD_VALUE;
    }

    // Case 2: The app has set sensorPixelModesUsed; verify that the requested modes are valid
    // and return an error otherwise.
    if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_DEFAULT) !=
            sensorPixelModesUsedSet.end() && !isInDefaultStreamConfigurationMap) {
        return BAD_VALUE;
    }

    if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
            sensorPixelModesUsedSet.end() && !isInMaximumResolutionStreamConfigurationMap) {
        return BAD_VALUE;
    }
    *overriddenSensorPixelModesUsed = sensorPixelModesUsedSet;
    return OK;
}

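// Checks ANDROID_REQUEST_AVAILABLE_CAPABILITIES for the ULTRA_HIGH_RESOLUTION_SENSOR
// capability.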
bool SessionConfigurationUtils::isUltraHighResolutionSensor(const CameraMetadata &deviceInfo) {
    camera_metadata_ro_entry_t entryCap;
    entryCap = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
    // Go through the capabilities and check if it has
    // ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR
    for (size_t i = 0; i < entryCap.count; ++i) {
        uint8_t capability = entryCap.data.u8[i];
        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) {
            return true;
        }
    }
    return false;
}

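// Downgrades a V3_7 stream configuration to V3_4 for older HALs; fails if multi-resolution
// input or output streams are present.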
bool SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
        hardware::camera::device::V3_4::StreamConfiguration &streamConfigV34,
        const hardware::camera::device::V3_7::StreamConfiguration &streamConfigV37) {
    if (streamConfigV37.multiResolutionInputImage) {
        // ICameraDevice older than 3.7 doesn't support multi-resolution input image.
        return false;
    }

    streamConfigV34.streams.resize(streamConfigV37.streams.size());
    for (size_t i = 0; i < streamConfigV37.streams.size(); i++) {
        if (streamConfigV37.streams[i].groupId != -1) {
            // ICameraDevice older than 3.7 doesn't support multi-resolution output
            // image.
            return false;
        }
        streamConfigV34.streams[i] = streamConfigV37.streams[i].v3_4;
    }
    streamConfigV34.operationMode = streamConfigV37.operationMode;
    streamConfigV34.sessionParams = streamConfigV37.sessionParams;

    return true;
}

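// Returns true when the camera is a performance-class primary camera and the app targets
// SDK version S or newer.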
bool SessionConfigurationUtils::targetPerfClassPrimaryCamera(
        const std::set<std::string>& perfClassPrimaryCameraIds, const std::string& cameraId,
        int targetSdkVersion) {
    bool isPerfClassPrimaryCamera =
            perfClassPrimaryCameraIds.find(cameraId) != perfClassPrimaryCameraIds.end();
    return targetSdkVersion >= SDK_VERSION_S && isPerfClassPrimaryCamera;
}

} // namespace camera3
} // namespace android