/*
 * Copyright 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// #define LOG_NDEBUG 0
#define LOG_TAG "VirtualCameraDevice"
#include "VirtualCameraDevice.h"

#include <algorithm>
#include <array>
#include <chrono>
#include <cstdint>
#include <iterator>
#include <map>
#include <numeric>
#include <optional>
#include <set>
#include <string>
#include <vector>

#include "VirtualCameraService.h"
#include "VirtualCameraSession.h"
#include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
#include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/StreamConfiguration.h"
#include "android/binder_auto_utils.h"
#include "android/binder_status.h"
#include "log/log.h"
#include "system/camera_metadata.h"
#include "util/MetadataUtil.h"
#include "util/Util.h"

namespace android {
namespace companion {
namespace virtualcamera {

using ::aidl::android::companion::virtualcamera::Format;
using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
using ::aidl::android::companion::virtualcamera::LensFacing;
using ::aidl::android::companion::virtualcamera::SensorOrientation;
using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
using ::aidl::android::hardware::camera::common::CameraResourceCost;
using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
using ::aidl::android::hardware::camera::device::ICameraDeviceSession;
using ::aidl::android::hardware::camera::device::ICameraInjectionSession;
using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::camera::device::StreamConfiguration;
using ::aidl::android::hardware::camera::device::StreamRotation;
using ::aidl::android::hardware::camera::device::StreamType;
using ::aidl::android::hardware::graphics::common::PixelFormat;

namespace {

using namespace std::chrono_literals;

// Prefix of camera name - "device@1.1/virtual/{camera_id}"
const char* kDevicePathPrefix = "device@1.1/virtual/";

constexpr int32_t kMaxJpegSize = 3 * 1024 * 1024 /*3MiB*/;

constexpr std::chrono::nanoseconds kMaxFrameDuration =
    std::chrono::duration_cast<std::chrono::nanoseconds>(
        1e9ns / VirtualCameraDevice::kMinFps);

constexpr uint8_t kPipelineMaxDepth = 2;

constexpr int k30Fps = 30;

constexpr MetadataBuilder::ControlRegion kDefaultEmptyControlRegion{};

const std::array<Resolution, 5> kStandardJpegThumbnailSizes{
    Resolution(176, 144), Resolution(240, 144), Resolution(256, 144),
    Resolution(240, 160), Resolution(240, 180)};

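// Pixel formats offered for output streams; each format is advertised for
// every supported output resolution when the stream configurations are built
// in initCameraCharacteristics below.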
const std::array<PixelFormat, 3> kOutputFormats{
    PixelFormat::IMPLEMENTATION_DEFINED, PixelFormat::YCBCR_420_888,
    PixelFormat::BLOB};

// The resolutions below will be used to extend the set of supported output
// resolutions. All resolutions with a lower pixel count and the same aspect
// ratio as some supported input resolution will be added to the set of
// supported output resolutions.
const std::array<Resolution, 10> kOutputResolutions{
    Resolution(320, 240),  Resolution(640, 360),  Resolution(640, 480),
    Resolution(720, 480),  Resolution(720, 576),  Resolution(800, 600),
    Resolution(1024, 576), Resolution(1280, 720), Resolution(1280, 960),
    Resolution(1280, 1080),
};

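// Returns the JPEG thumbnail sizes to advertise: (0, 0) to allow disabling
// thumbnail generation, plus every standard thumbnail size whose aspect ratio
// roughly matches at least one supported input configuration.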
std::vector<Resolution> getSupportedJpegThumbnailSizes(
    const std::vector<SupportedStreamConfiguration>& configs) {
  auto isSupportedByAnyInputConfig =
      [&configs](const Resolution thumbnailResolution) {
        return std::any_of(
            configs.begin(), configs.end(),
            [thumbnailResolution](const SupportedStreamConfiguration& config) {
              return isApproximatellySameAspectRatio(
                  thumbnailResolution, Resolution(config.width, config.height));
            });
      };

  std::vector<Resolution> supportedThumbnailSizes({Resolution(0, 0)});
  std::copy_if(kStandardJpegThumbnailSizes.begin(),
               kStandardJpegThumbnailSizes.end(),
               std::back_insert_iterator(supportedThumbnailSizes),
               isSupportedByAnyInputConfig);
  return supportedThumbnailSizes;
}

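// Returns true if the pixel format is one of the output formats supported by
// the virtual camera.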
bool isSupportedOutputFormat(const PixelFormat pixelFormat) {
  return std::find(kOutputFormats.begin(), kOutputFormats.end(), pixelFormat) !=
         kOutputFormats.end();
}

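// Derives the AE-available FPS ranges from the input configurations: for every
// input both (kMinFps, maxFps) and (maxFps, maxFps) are offered, and the
// (kMinFps, 30) and (30, 30) ranges required by the CDD are added when any
// input can reach 30 fps.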
std::vector<FpsRange> fpsRangesForInputConfig(
    const std::vector<SupportedStreamConfiguration>& configs) {
  std::set<FpsRange> availableRanges;

  for (const SupportedStreamConfiguration& config : configs) {
    availableRanges.insert(
        {.minFps = VirtualCameraDevice::kMinFps, .maxFps = config.maxFps});
    availableRanges.insert({.minFps = config.maxFps, .maxFps = config.maxFps});
  }

  if (std::any_of(configs.begin(), configs.end(),
                  [](const SupportedStreamConfiguration& config) {
                    return config.maxFps >= k30Fps;
                  })) {
    // Extend the set of available ranges with (minFps <= 15, 30) & (30, 30) as
    // required by CDD.
    availableRanges.insert(
        {.minFps = VirtualCameraDevice::kMinFps, .maxFps = k30Fps});
    availableRanges.insert({.minFps = k30Fps, .maxFps = k30Fps});
  }

  return std::vector<FpsRange>(availableRanges.begin(), availableRanges.end());
}

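// Returns the supported input resolution with the largest pixel count, or
// std::nullopt if the list of configurations is empty.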
std::optional<Resolution> getMaxResolution(
    const std::vector<SupportedStreamConfiguration>& configs) {
  auto itMax = std::max_element(configs.begin(), configs.end(),
                                [](const SupportedStreamConfiguration& a,
                                   const SupportedStreamConfiguration& b) {
                                  // Compare configurations by pixel count.
                                  return a.width * a.height < b.width * b.height;
                                });
  if (itMax == configs.end()) {
    ALOGE(
        "%s: empty vector of supported configurations, cannot find largest "
        "resolution.",
        __func__);
    return std::nullopt;
  }

  return Resolution(itMax->width, itMax->height);
}

// Returns a map of unique resolution to maximum maxFps for all streams with
// that resolution.
std::map<Resolution, int> getResolutionToMaxFpsMap(
    const std::vector<SupportedStreamConfiguration>& configs) {
  std::map<Resolution, int> resolutionToMaxFpsMap;

  for (const SupportedStreamConfiguration& config : configs) {
    Resolution resolution(config.width, config.height);
    if (resolutionToMaxFpsMap.find(resolution) == resolutionToMaxFpsMap.end()) {
      resolutionToMaxFpsMap[resolution] = config.maxFps;
    } else {
      int currentMaxFps = resolutionToMaxFpsMap[resolution];
      resolutionToMaxFpsMap[resolution] = std::max(currentMaxFps, config.maxFps);
    }
  }

  std::map<Resolution, int> additionalResolutionToMaxFpsMap;
  // Add additional resolutions we can support by downscaling input streams
  // with the same aspect ratio.
  for (const Resolution& outputResolution : kOutputResolutions) {
    for (const auto& [resolution, maxFps] : resolutionToMaxFpsMap) {
      if (resolutionToMaxFpsMap.find(outputResolution) !=
          resolutionToMaxFpsMap.end()) {
        // Resolution is already in the map, skip it.
        continue;
      }

      if (outputResolution < resolution &&
          isApproximatellySameAspectRatio(outputResolution, resolution)) {
        // Lower resolution with the same aspect ratio; we can achieve this by
        // downscaling, so add it to the map.
        ALOGD(
            "Extending set of output resolutions with %dx%d which has same "
            "aspect ratio as supported input %dx%d.",
            outputResolution.width, outputResolution.height, resolution.width,
            resolution.height);
        additionalResolutionToMaxFpsMap[outputResolution] = maxFps;
        break;
      }
    }
  }

  // Add all resolutions we can achieve by downscaling to the map.
  resolutionToMaxFpsMap.insert(additionalResolutionToMaxFpsMap.begin(),
                               additionalResolutionToMaxFpsMap.end());

  return resolutionToMaxFpsMap;
}

// TODO(b/301023410) - Populate camera characteristics according to camera
// configuration.
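// Builds the static camera characteristics for a virtual camera backed by the
// given input configurations, sensor orientation and lens facing. Returns
// std::nullopt when an input configuration is unsupported or the maximum
// input resolution cannot be determined.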
std::optional<CameraMetadata> initCameraCharacteristics(
    const std::vector<SupportedStreamConfiguration>& supportedInputConfig,
    const SensorOrientation sensorOrientation, const LensFacing lensFacing,
    const int32_t deviceId) {
  if (!std::all_of(supportedInputConfig.begin(), supportedInputConfig.end(),
                   [](const SupportedStreamConfiguration& config) {
                     return isFormatSupportedForInput(
                         config.width, config.height, config.pixelFormat,
                         config.maxFps);
                   })) {
    ALOGE("%s: input configuration contains unsupported format", __func__);
    return std::nullopt;
  }

  MetadataBuilder builder =
      MetadataBuilder()
          .setSupportedHardwareLevel(
              ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL)
          .setDeviceId(deviceId)
          .setFlashAvailable(false)
          .setLensFacing(
              static_cast<camera_metadata_enum_android_lens_facing>(lensFacing))
          .setAvailableFocalLengths({VirtualCameraDevice::kFocalLength})
          .setSensorOrientation(static_cast<int32_t>(sensorOrientation))
          .setSensorReadoutTimestamp(
              ANDROID_SENSOR_READOUT_TIMESTAMP_NOT_SUPPORTED)
          .setSensorTimestampSource(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN)
          .setSensorPhysicalSize(36.0, 24.0)
          .setAvailableAberrationCorrectionModes(
              {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF})
          .setAvailableNoiseReductionModes({ANDROID_NOISE_REDUCTION_MODE_OFF})
          .setAvailableFaceDetectModes({ANDROID_STATISTICS_FACE_DETECT_MODE_OFF})
          .setAvailableStreamUseCases(
              {ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
               ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
               ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
               ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
               ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
               ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL})
          .setAvailableTestPatternModes({ANDROID_SENSOR_TEST_PATTERN_MODE_OFF})
          .setAvailableMaxDigitalZoom(1.0)
          .setControlAvailableModes({ANDROID_CONTROL_MODE_AUTO})
          .setControlAfAvailableModes({ANDROID_CONTROL_AF_MODE_OFF})
          .setControlAvailableSceneModes({ANDROID_CONTROL_SCENE_MODE_DISABLED})
          .setControlAvailableEffects({ANDROID_CONTROL_EFFECT_MODE_OFF})
          .setControlAvailableVideoStabilizationModes(
              {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF})
          .setControlAeAvailableModes({ANDROID_CONTROL_AE_MODE_ON})
          .setControlAeAvailableAntibandingModes(
              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO})
          .setControlAeAvailableFpsRanges(
              fpsRangesForInputConfig(supportedInputConfig))
          .setControlMaxRegions(0, 0, 0)
          .setControlAfRegions({kDefaultEmptyControlRegion})
          .setControlAeRegions({kDefaultEmptyControlRegion})
          .setControlAwbRegions({kDefaultEmptyControlRegion})
          .setControlAeCompensationRange(0, 0)
          .setControlAeCompensationStep(camera_metadata_rational_t{0, 1})
          .setControlAwbLockAvailable(false)
          .setControlAeLockAvailable(false)
          .setControlAvailableAwbModes({ANDROID_CONTROL_AWB_MODE_AUTO})
          .setControlZoomRatioRange(/*min=*/1.0, /*max=*/1.0)
          .setCroppingType(ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY)
          .setJpegAvailableThumbnailSizes(
              getSupportedJpegThumbnailSizes(supportedInputConfig))
          .setMaxJpegSize(kMaxJpegSize)
          .setMaxFaceCount(0)
          .setMaxFrameDuration(kMaxFrameDuration)
          .setMaxNumberOutputStreams(
              VirtualCameraDevice::kMaxNumberOfRawStreams,
              VirtualCameraDevice::kMaxNumberOfProcessedStreams,
              VirtualCameraDevice::kMaxNumberOfStallStreams)
          .setRequestPartialResultCount(1)
          .setPipelineMaxDepth(kPipelineMaxDepth)
          .setSyncMaxLatency(ANDROID_SYNC_MAX_LATENCY_UNKNOWN)
          .setAvailableRequestKeys({ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
                                    ANDROID_CONTROL_CAPTURE_INTENT,
                                    ANDROID_CONTROL_AE_MODE,
                                    ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
                                    ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
                                    ANDROID_CONTROL_AE_ANTIBANDING_MODE,
                                    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                                    ANDROID_CONTROL_AF_TRIGGER,
                                    ANDROID_CONTROL_AF_MODE,
                                    ANDROID_CONTROL_AWB_MODE,
                                    ANDROID_SCALER_CROP_REGION,
                                    ANDROID_CONTROL_EFFECT_MODE,
                                    ANDROID_CONTROL_MODE,
                                    ANDROID_CONTROL_SCENE_MODE,
                                    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
                                    ANDROID_CONTROL_ZOOM_RATIO,
                                    ANDROID_FLASH_MODE,
                                    ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                                    ANDROID_JPEG_ORIENTATION,
                                    ANDROID_JPEG_QUALITY,
                                    ANDROID_JPEG_THUMBNAIL_QUALITY,
                                    ANDROID_JPEG_THUMBNAIL_SIZE,
                                    ANDROID_NOISE_REDUCTION_MODE,
                                    ANDROID_STATISTICS_FACE_DETECT_MODE})
          .setAvailableResultKeys({
              ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
              ANDROID_CONTROL_AE_ANTIBANDING_MODE,
              ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
              ANDROID_CONTROL_AE_LOCK,
              ANDROID_CONTROL_AE_MODE,
              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
              ANDROID_CONTROL_AE_STATE,
              ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
              ANDROID_CONTROL_AF_MODE,
              ANDROID_CONTROL_AF_STATE,
              ANDROID_CONTROL_AF_TRIGGER,
              ANDROID_CONTROL_AWB_LOCK,
              ANDROID_CONTROL_AWB_MODE,
              ANDROID_CONTROL_AWB_STATE,
              ANDROID_CONTROL_CAPTURE_INTENT,
              ANDROID_CONTROL_EFFECT_MODE,
              ANDROID_CONTROL_MODE,
              ANDROID_CONTROL_SCENE_MODE,
              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
              ANDROID_STATISTICS_FACE_DETECT_MODE,
              ANDROID_FLASH_MODE,
              ANDROID_FLASH_STATE,
              ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
              ANDROID_JPEG_QUALITY,
              ANDROID_JPEG_THUMBNAIL_QUALITY,
              ANDROID_LENS_FOCAL_LENGTH,
              ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
              ANDROID_NOISE_REDUCTION_MODE,
              ANDROID_REQUEST_PIPELINE_DEPTH,
              ANDROID_SENSOR_TIMESTAMP,
              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
              ANDROID_STATISTICS_SCENE_FLICKER,
          })
          .setAvailableCapabilities(
              {ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE});

  // Active array size must correspond to the largest supported input
  // resolution.
  std::optional<Resolution> maxResolution =
      getMaxResolution(supportedInputConfig);
  if (!maxResolution.has_value()) {
    return std::nullopt;
  }
  builder.setSensorActiveArraySize(0, 0, maxResolution->width,
                                   maxResolution->height);
  builder.setSensorPixelArraySize(maxResolution->width, maxResolution->height);

  std::vector<MetadataBuilder::StreamConfiguration> outputConfigurations;

  // TODO(b/301023410) Also add all "standard" resolutions we can rescale the
  // streams to (all standard resolutions with the same aspect ratio).

  std::map<Resolution, int> resolutionToMaxFpsMap =
      getResolutionToMaxFpsMap(supportedInputConfig);

  // Add configurations for all unique input resolutions and output formats.
  for (const PixelFormat format : kOutputFormats) {
    std::transform(
        resolutionToMaxFpsMap.begin(), resolutionToMaxFpsMap.end(),
        std::back_inserter(outputConfigurations), [format](const auto& entry) {
          Resolution resolution = entry.first;
          int maxFps = entry.second;
          return MetadataBuilder::StreamConfiguration{
              .width = resolution.width,
              .height = resolution.height,
              .format = static_cast<int32_t>(format),
              .minFrameDuration = std::chrono::nanoseconds(1s) / maxFps,
              .minStallDuration = 0s};
        });
  }

  ALOGV("Adding %zu output configurations", outputConfigurations.size());
  builder.setAvailableOutputStreamConfigurations(outputConfigurations);

  auto metadata = builder.setAvailableCharacteristicKeys().build();
  if (metadata == nullptr) {
    ALOGE("Failed to build metadata!");
    return CameraMetadata();
  }

  return std::move(*metadata);
}

}  // namespace

VirtualCameraDevice::VirtualCameraDevice(
    const std::string& cameraId,
    const VirtualCameraConfiguration& configuration, int32_t deviceId)
    : mCameraId(cameraId),
      mVirtualCameraClientCallback(configuration.virtualCameraCallback),
      mSupportedInputConfigurations(configuration.supportedStreamConfigs) {
  std::optional<CameraMetadata> metadata = initCameraCharacteristics(
      mSupportedInputConfigurations, configuration.sensorOrientation,
      configuration.lensFacing, deviceId);
  if (metadata.has_value()) {
    mCameraCharacteristics = *metadata;
  } else {
    ALOGE(
        "%s: Failed to initialize camera characteristics based on provided "
        "configuration.",
        __func__);
  }
}

ndk::ScopedAStatus VirtualCameraDevice::getCameraCharacteristics(
    CameraMetadata* _aidl_return) {
  ALOGV("%s", __func__);
  if (_aidl_return == nullptr) {
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  *_aidl_return = mCameraCharacteristics;
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraDevice::getPhysicalCameraCharacteristics(
    const std::string& in_physicalCameraId, CameraMetadata* _aidl_return) {
  ALOGV("%s: physicalCameraId %s", __func__, in_physicalCameraId.c_str());
  (void)_aidl_return;

  // VTS tests expect this call to fail with illegal argument status for
  // all publicly advertised camera ids. Because we don't support physical
  // camera ids, we just always fail with illegal argument (there's no valid
  // argument to provide).
  return cameraStatus(Status::ILLEGAL_ARGUMENT);
}

ndk::ScopedAStatus VirtualCameraDevice::getResourceCost(
    CameraResourceCost* _aidl_return) {
  ALOGV("%s", __func__);
  if (_aidl_return == nullptr) {
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }
  _aidl_return->resourceCost = 100;  // ¯\_(ツ)_/¯
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraDevice::isStreamCombinationSupported(
    const StreamConfiguration& in_streams, bool* _aidl_return) {
  ALOGV("%s", __func__);

  if (_aidl_return == nullptr) {
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  *_aidl_return = isStreamCombinationSupported(in_streams);
  return ndk::ScopedAStatus::ok();
}

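// Validates a stream combination against virtual camera constraints: no input
// streams, all streams share the first stream's aspect ratio, every stream
// uses rotation 0 and a supported output format, each stream fits within some
// supported input configuration, and the processed/stall stream counts stay
// within the advertised maximums.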
bool VirtualCameraDevice::isStreamCombinationSupported(
    const StreamConfiguration& streamConfiguration) const {
  if (streamConfiguration.streams.empty()) {
    ALOGE("%s: Querying empty configuration", __func__);
    return false;
  }

  const std::vector<Stream>& streams = streamConfiguration.streams;

  Resolution firstStreamResolution(streams[0].width, streams[0].height);
  auto isSameAspectRatioAsFirst = [firstStreamResolution](const Stream& stream) {
    return isApproximatellySameAspectRatio(
        firstStreamResolution, Resolution(stream.width, stream.height));
  };
  if (!std::all_of(streams.begin(), streams.end(), isSameAspectRatioAsFirst)) {
    ALOGW(
        "%s: Requested streams do not have the same aspect ratio. Different "
        "aspect ratios are currently not supported by virtual camera. "
        "Stream configuration: %s",
        __func__, streamConfiguration.toString().c_str());
    return false;
  }

  int numberOfProcessedStreams = 0;
  int numberOfStallStreams = 0;
  for (const Stream& stream : streamConfiguration.streams) {
    ALOGV("%s: Configuration queried: %s", __func__, stream.toString().c_str());

    if (stream.streamType == StreamType::INPUT) {
      ALOGW("%s: Input stream type is not supported", __func__);
      return false;
    }

    if (stream.rotation != StreamRotation::ROTATION_0 ||
        !isSupportedOutputFormat(stream.format)) {
      ALOGV("Unsupported output stream type");
      return false;
    }

    if (stream.format == PixelFormat::BLOB) {
      numberOfStallStreams++;
    } else {
      numberOfProcessedStreams++;
    }

    Resolution requestedResolution(stream.width, stream.height);
    auto matchesSupportedInputConfig =
        [requestedResolution](const SupportedStreamConfiguration& config) {
          Resolution supportedInputResolution(config.width, config.height);
          return requestedResolution <= supportedInputResolution &&
                 isApproximatellySameAspectRatio(requestedResolution,
                                                 supportedInputResolution);
        };
    if (std::none_of(mSupportedInputConfigurations.begin(),
                     mSupportedInputConfigurations.end(),
                     matchesSupportedInputConfig)) {
      ALOGV("Requested config doesn't match any supported input config");
      return false;
    }
  }

  if (numberOfProcessedStreams > kMaxNumberOfProcessedStreams) {
    ALOGE("%s: %d processed streams exceeds the supported maximum of %d",
          __func__, numberOfProcessedStreams, kMaxNumberOfProcessedStreams);
    return false;
  }

  if (numberOfStallStreams > kMaxNumberOfStallStreams) {
    ALOGE("%s: %d stall streams exceeds the supported maximum of %d", __func__,
          numberOfStallStreams, kMaxNumberOfStallStreams);
    return false;
  }

  return true;
}

ndk::ScopedAStatus VirtualCameraDevice::open(
    const std::shared_ptr<ICameraDeviceCallback>& in_callback,
    std::shared_ptr<ICameraDeviceSession>* _aidl_return) {
  ALOGV("%s", __func__);

  *_aidl_return = ndk::SharedRefBase::make<VirtualCameraSession>(
      sharedFromThis(), in_callback, mVirtualCameraClientCallback);

  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraDevice::openInjectionSession(
    const std::shared_ptr<ICameraDeviceCallback>& in_callback,
    std::shared_ptr<ICameraInjectionSession>* _aidl_return) {
  ALOGV("%s", __func__);

  (void)in_callback;
  (void)_aidl_return;
  return cameraStatus(Status::OPERATION_NOT_SUPPORTED);
}

ndk::ScopedAStatus VirtualCameraDevice::setTorchMode(bool in_on) {
  ALOGV("%s: on = %s", __func__, in_on ? "on" : "off");
  return cameraStatus(Status::OPERATION_NOT_SUPPORTED);
}

ndk::ScopedAStatus VirtualCameraDevice::turnOnTorchWithStrengthLevel(
    int32_t in_torchStrength) {
  ALOGV("%s: torchStrength = %d", __func__, in_torchStrength);
  return cameraStatus(Status::OPERATION_NOT_SUPPORTED);
}

ndk::ScopedAStatus VirtualCameraDevice::getTorchStrengthLevel(
    int32_t* _aidl_return) {
  (void)_aidl_return;
  return cameraStatus(Status::OPERATION_NOT_SUPPORTED);
}

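// Writes a human-readable summary of this virtual camera (owning virtual
// device id and supported input configurations) to the given fd for dumpsys.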
binder_status_t VirtualCameraDevice::dump(int fd, const char**, uint32_t) {
  ALOGD("Dumping virtual camera %s", mCameraId.c_str());
  const char* indent = "  ";
  const char* doubleIndent = "    ";
  dprintf(fd, "%svirtual_camera %s belongs to virtual device %d\n", indent,
          mCameraId.c_str(),
          getDeviceId(mCameraCharacteristics)
              .value_or(VirtualCameraService::kDefaultDeviceId));
  dprintf(fd, "%sSupportedStreamConfiguration:\n", indent);
  for (auto& config : mSupportedInputConfigurations) {
    dprintf(fd, "%s%s", doubleIndent, config.toString().c_str());
  }
  return STATUS_OK;
}

std::string VirtualCameraDevice::getCameraName() const {
  return std::string(kDevicePathPrefix) + mCameraId;
}

const std::vector<SupportedStreamConfiguration>&
VirtualCameraDevice::getInputConfigs() const {
  return mSupportedInputConfigurations;
}

Resolution VirtualCameraDevice::getMaxInputResolution() const {
  std::optional<Resolution> maxResolution =
      getMaxResolution(mSupportedInputConfigurations);
  if (!maxResolution.has_value()) {
    ALOGE(
        "%s: Cannot determine sensor size for virtual camera - input "
        "configurations empty?",
        __func__);
    return Resolution(0, 0);
  }
  return maxResolution.value();
}

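// Returns a new unique id for an input stream; ids increase monotonically for
// the lifetime of this device instance.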
int VirtualCameraDevice::allocateInputStreamId() {
  return mNextInputStreamId++;
}

std::shared_ptr<VirtualCameraDevice> VirtualCameraDevice::sharedFromThis() {
  // SharedRefBase, which BnCameraDevice inherits from, breaks
  // std::enable_shared_from_this. This is the recommended replacement for
  // shared_from_this() per the documentation in binder_interface_utils.h.
  return ref<VirtualCameraDevice>();
}

}  // namespace virtualcamera
}  // namespace companion
}  // namespace android