1 /*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 // #define LOG_NDEBUG 0
18 #define LOG_TAG "VirtualCameraSession"
19 #include "VirtualCameraSession.h"
20
21 #include <algorithm>
22 #include <atomic>
23 #include <chrono>
24 #include <cmath>
25 #include <cstddef>
26 #include <cstdint>
27 #include <cstring>
28 #include <map>
29 #include <memory>
30 #include <mutex>
31 #include <numeric>
32 #include <optional>
33 #include <tuple>
34 #include <unordered_set>
35 #include <utility>
36 #include <vector>
37
38 #include "CameraMetadata.h"
39 #include "EGL/egl.h"
40 #include "VirtualCameraDevice.h"
41 #include "VirtualCameraRenderThread.h"
42 #include "VirtualCameraStream.h"
43 #include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
44 #include "aidl/android/hardware/camera/common/Status.h"
45 #include "aidl/android/hardware/camera/device/BufferCache.h"
46 #include "aidl/android/hardware/camera/device/BufferStatus.h"
47 #include "aidl/android/hardware/camera/device/CameraMetadata.h"
48 #include "aidl/android/hardware/camera/device/CaptureRequest.h"
49 #include "aidl/android/hardware/camera/device/HalStream.h"
50 #include "aidl/android/hardware/camera/device/NotifyMsg.h"
51 #include "aidl/android/hardware/camera/device/RequestTemplate.h"
52 #include "aidl/android/hardware/camera/device/ShutterMsg.h"
53 #include "aidl/android/hardware/camera/device/Stream.h"
54 #include "aidl/android/hardware/camera/device/StreamBuffer.h"
55 #include "aidl/android/hardware/camera/device/StreamConfiguration.h"
56 #include "aidl/android/hardware/camera/device/StreamRotation.h"
57 #include "aidl/android/hardware/graphics/common/BufferUsage.h"
58 #include "aidl/android/hardware/graphics/common/PixelFormat.h"
59 #include "android/hardware_buffer.h"
60 #include "android/native_window_aidl.h"
61 #include "fmq/AidlMessageQueue.h"
62 #include "system/camera_metadata.h"
63 #include "ui/GraphicBuffer.h"
64 #include "util/EglDisplayContext.h"
65 #include "util/EglFramebuffer.h"
66 #include "util/EglProgram.h"
67 #include "util/JpegUtil.h"
68 #include "util/MetadataUtil.h"
69 #include "util/Util.h"
70
71 namespace android {
72 namespace companion {
73 namespace virtualcamera {
74
75 using ::aidl::android::companion::virtualcamera::Format;
76 using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
77 using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
78 using ::aidl::android::hardware::camera::common::Status;
79 using ::aidl::android::hardware::camera::device::BufferCache;
80 using ::aidl::android::hardware::camera::device::CameraMetadata;
81 using ::aidl::android::hardware::camera::device::CameraOfflineSessionInfo;
82 using ::aidl::android::hardware::camera::device::CaptureRequest;
83 using ::aidl::android::hardware::camera::device::HalStream;
84 using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
85 using ::aidl::android::hardware::camera::device::ICameraOfflineSession;
86 using ::aidl::android::hardware::camera::device::RequestTemplate;
87 using ::aidl::android::hardware::camera::device::Stream;
88 using ::aidl::android::hardware::camera::device::StreamBuffer;
89 using ::aidl::android::hardware::camera::device::StreamConfiguration;
90 using ::aidl::android::hardware::camera::device::StreamRotation;
91 using ::aidl::android::hardware::common::fmq::MQDescriptor;
92 using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
93 using ::aidl::android::hardware::graphics::common::BufferUsage;
94 using ::aidl::android::hardware::graphics::common::PixelFormat;
95 using ::android::base::unique_fd;
96
97 namespace {
98
99 using metadata_ptr =
100 std::unique_ptr<camera_metadata_t, void (*)(camera_metadata_t*)>;
101
102 using namespace std::chrono_literals;
103
104 // Size of request/result metadata fast message queue.
// Setting this to 0 always disables the FMQ.
106 constexpr size_t kMetadataMsgQueueSize = 0;
107
108 // Maximum number of buffers to use per single stream.
109 constexpr size_t kMaxStreamBuffers = 2;
110
// Thumbnail size (0,0) corresponds to disabling the thumbnail.
112 const Resolution kDefaultJpegThumbnailSize(0, 0);
113
requestTemplateToIntent(const RequestTemplate type)114 camera_metadata_enum_android_control_capture_intent_t requestTemplateToIntent(
115 const RequestTemplate type) {
116 switch (type) {
117 case RequestTemplate::PREVIEW:
118 return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
119 case RequestTemplate::STILL_CAPTURE:
120 return ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
121 case RequestTemplate::VIDEO_RECORD:
122 return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
123 case RequestTemplate::VIDEO_SNAPSHOT:
124 return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
125 default:
126 // Return PREVIEW by default
127 return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
128 }
129 }
130
getMaxFps(const std::vector<SupportedStreamConfiguration> & configs)131 int getMaxFps(const std::vector<SupportedStreamConfiguration>& configs) {
132 return std::transform_reduce(
133 configs.begin(), configs.end(), 0,
134 [](const int a, const int b) { return std::max(a, b); },
135 [](const SupportedStreamConfiguration& config) { return config.maxFps; });
136 }
137
createDefaultRequestSettings(const RequestTemplate type,const std::vector<SupportedStreamConfiguration> & inputConfigs)138 CameraMetadata createDefaultRequestSettings(
139 const RequestTemplate type,
140 const std::vector<SupportedStreamConfiguration>& inputConfigs) {
141 int maxFps = getMaxFps(inputConfigs);
142 auto metadata =
143 MetadataBuilder()
144 .setAberrationCorrectionMode(
145 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
146 .setControlCaptureIntent(requestTemplateToIntent(type))
147 .setControlMode(ANDROID_CONTROL_MODE_AUTO)
148 .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
149 .setControlAeExposureCompensation(0)
150 .setControlAeTargetFpsRange(FpsRange{maxFps, maxFps})
151 .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
152 .setControlAePrecaptureTrigger(
153 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
154 .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
155 .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
156 .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
157 .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
158 .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
159 .setFlashMode(ANDROID_FLASH_MODE_OFF)
160 .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
161 .setJpegQuality(VirtualCameraDevice::kDefaultJpegQuality)
162 .setJpegThumbnailQuality(VirtualCameraDevice::kDefaultJpegQuality)
163 .setJpegThumbnailSize(0, 0)
164 .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
165 .build();
166 if (metadata == nullptr) {
167 ALOGE("%s: Failed to construct metadata for default request type %s",
168 __func__, toString(type).c_str());
169 return CameraMetadata();
170 } else {
171 ALOGV("%s: Successfully created metadata for request type %s", __func__,
172 toString(type).c_str());
173 }
174 return *metadata;
175 }
176
getHalStream(const Stream & stream)177 HalStream getHalStream(const Stream& stream) {
178 HalStream halStream;
179 halStream.id = stream.id;
180 halStream.physicalCameraId = stream.physicalCameraId;
181 halStream.maxBuffers = kMaxStreamBuffers;
182
183 if (stream.format == PixelFormat::IMPLEMENTATION_DEFINED) {
184 // If format is implementation defined we need it to override
185 // it with actual format.
186 // TODO(b/301023410) Override with the format based on the
187 // camera configuration, once we support more formats.
188 halStream.overrideFormat = PixelFormat::YCBCR_420_888;
189 } else {
190 halStream.overrideFormat = stream.format;
191 }
192 halStream.overrideDataSpace = stream.dataSpace;
193
194 halStream.producerUsage = BufferUsage::GPU_RENDER_TARGET;
195 halStream.supportOffline = false;
196 return halStream;
197 }
198
getHighestResolutionStream(const std::vector<Stream> & streams)199 Stream getHighestResolutionStream(const std::vector<Stream>& streams) {
200 return *(std::max_element(streams.begin(), streams.end(),
201 [](const Stream& a, const Stream& b) {
202 return a.width * a.height < b.width * b.height;
203 }));
204 }
205
resolutionFromStream(const Stream & stream)206 Resolution resolutionFromStream(const Stream& stream) {
207 return Resolution(stream.width, stream.height);
208 }
209
resolutionFromInputConfig(const SupportedStreamConfiguration & inputConfig)210 Resolution resolutionFromInputConfig(
211 const SupportedStreamConfiguration& inputConfig) {
212 return Resolution(inputConfig.width, inputConfig.height);
213 }
214
resolutionFromSurface(const sp<Surface> surface)215 std::optional<Resolution> resolutionFromSurface(const sp<Surface> surface) {
216 Resolution res{0, 0};
217 if (surface == nullptr) {
218 ALOGE("%s: Cannot get resolution from null surface", __func__);
219 return std::nullopt;
220 }
221
222 int status = surface->query(NATIVE_WINDOW_WIDTH, &res.width);
223 if (status != NO_ERROR) {
224 ALOGE("%s: Failed to get width from surface", __func__);
225 return std::nullopt;
226 }
227
228 status = surface->query(NATIVE_WINDOW_HEIGHT, &res.height);
229 if (status != NO_ERROR) {
230 ALOGE("%s: Failed to get height from surface", __func__);
231 return std::nullopt;
232 }
233 return res;
234 }
235
pickInputConfigurationForStreams(const std::vector<Stream> & requestedStreams,const std::vector<SupportedStreamConfiguration> & supportedInputConfigs)236 std::optional<SupportedStreamConfiguration> pickInputConfigurationForStreams(
237 const std::vector<Stream>& requestedStreams,
238 const std::vector<SupportedStreamConfiguration>& supportedInputConfigs) {
239 Stream maxResolutionStream = getHighestResolutionStream(requestedStreams);
240 Resolution maxResolution = resolutionFromStream(maxResolutionStream);
241
242 // Find best fitting stream to satisfy all requested streams:
243 // Best fitting => same or higher resolution as input with lowest pixel count
244 // difference and same aspect ratio.
245 auto isBetterInputConfig = [maxResolution](
246 const SupportedStreamConfiguration& configA,
247 const SupportedStreamConfiguration& configB) {
248 int maxResPixelCount = maxResolution.width * maxResolution.height;
249 int pixelCountDiffA =
250 std::abs((configA.width * configA.height) - maxResPixelCount);
251 int pixelCountDiffB =
252 std::abs((configB.width * configB.height) - maxResPixelCount);
253
254 return pixelCountDiffA < pixelCountDiffB;
255 };
256
257 std::optional<SupportedStreamConfiguration> bestConfig;
258 for (const SupportedStreamConfiguration& inputConfig : supportedInputConfigs) {
259 Resolution inputConfigResolution = resolutionFromInputConfig(inputConfig);
260 if (inputConfigResolution < maxResolution ||
261 !isApproximatellySameAspectRatio(inputConfigResolution, maxResolution)) {
262 // We don't want to upscale from lower resolution, or use different aspect
263 // ratio, skip.
264 continue;
265 }
266
267 if (!bestConfig.has_value() ||
268 isBetterInputConfig(inputConfig, bestConfig.value())) {
269 bestConfig = inputConfig;
270 }
271 }
272
273 return bestConfig;
274 }
275
createSettingsFromMetadata(const CameraMetadata & metadata)276 RequestSettings createSettingsFromMetadata(const CameraMetadata& metadata) {
277 return RequestSettings{
278 .jpegQuality = getJpegQuality(metadata).value_or(
279 VirtualCameraDevice::kDefaultJpegQuality),
280 .jpegOrientation = getJpegOrientation(metadata),
281 .thumbnailResolution =
282 getJpegThumbnailSize(metadata).value_or(Resolution(0, 0)),
283 .thumbnailJpegQuality = getJpegThumbnailQuality(metadata).value_or(
284 VirtualCameraDevice::kDefaultJpegQuality),
285 .fpsRange = getFpsRange(metadata),
286 .captureIntent = getCaptureIntent(metadata).value_or(
287 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW),
288 .gpsCoordinates = getGpsCoordinates(metadata),
289 .aePrecaptureTrigger = getPrecaptureTrigger(metadata)};
290 }
291
292 } // namespace
293
// Creates a session bound to `cameraDevice`.
//
// `cameraDeviceCallback` is the camera framework callback used to deliver
// capture results; `virtualCameraClientCallback` (may be null) notifies the
// owning virtual-camera client about stream lifecycle events.
//
// Both metadata fast message queues are created eagerly; with
// kMetadataMsgQueueSize == 0 they are effectively disabled. An invalid
// queue is only logged here — callers receive the (invalid) descriptor via
// the getCapture*MetadataQueue methods.
VirtualCameraSession::VirtualCameraSession(
    std::shared_ptr<VirtualCameraDevice> cameraDevice,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback,
    std::shared_ptr<IVirtualCameraCallback> virtualCameraClientCallback)
    : mCameraDevice(cameraDevice),
      mCameraDeviceCallback(cameraDeviceCallback),
      mVirtualCameraClientCallback(virtualCameraClientCallback) {
  mRequestMetadataQueue = std::make_unique<RequestMetadataQueue>(
      kMetadataMsgQueueSize, false /* non blocking */);
  if (!mRequestMetadataQueue->isValid()) {
    ALOGE("%s: invalid request fmq", __func__);
  }

  mResultMetadataQueue = std::make_shared<ResultMetadataQueue>(
      kMetadataMsgQueueSize, false /* non blocking */);
  if (!mResultMetadataQueue->isValid()) {
    ALOGE("%s: invalid result fmq", __func__);
  }
}
313
close()314 ndk::ScopedAStatus VirtualCameraSession::close() {
315 ALOGV("%s", __func__);
316 {
317 std::lock_guard<std::mutex> lock(mLock);
318
319 if (mVirtualCameraClientCallback != nullptr) {
320 mVirtualCameraClientCallback->onStreamClosed(mCurrentInputStreamId);
321 }
322
323 if (mRenderThread != nullptr) {
324 mRenderThread->stop();
325 mRenderThread = nullptr;
326 }
327 }
328
329 mSessionContext.closeAllStreams();
330 return ndk::ScopedAStatus::ok();
331 }
332
configureStreams(const StreamConfiguration & in_requestedConfiguration,std::vector<HalStream> * _aidl_return)333 ndk::ScopedAStatus VirtualCameraSession::configureStreams(
334 const StreamConfiguration& in_requestedConfiguration,
335 std::vector<HalStream>* _aidl_return) {
336 ALOGV("%s: requestedConfiguration: %s", __func__,
337 in_requestedConfiguration.toString().c_str());
338
339 if (_aidl_return == nullptr) {
340 return cameraStatus(Status::ILLEGAL_ARGUMENT);
341 }
342
343 std::shared_ptr<VirtualCameraDevice> virtualCamera = mCameraDevice.lock();
344 if (virtualCamera == nullptr) {
345 ALOGW("%s: configure called on already unregistered camera", __func__);
346 return cameraStatus(Status::CAMERA_DISCONNECTED);
347 }
348
349 mSessionContext.removeStreamsNotInStreamConfiguration(
350 in_requestedConfiguration);
351
352 auto& streams = in_requestedConfiguration.streams;
353 auto& halStreams = *_aidl_return;
354 halStreams.clear();
355 halStreams.resize(in_requestedConfiguration.streams.size());
356
357 if (!virtualCamera->isStreamCombinationSupported(in_requestedConfiguration)) {
358 ALOGE("%s: Requested stream configuration is not supported", __func__);
359 return cameraStatus(Status::ILLEGAL_ARGUMENT);
360 }
361
362 sp<Surface> inputSurface = nullptr;
363 int inputStreamId = -1;
364 std::optional<SupportedStreamConfiguration> inputConfig;
365 {
366 std::lock_guard<std::mutex> lock(mLock);
367 for (int i = 0; i < in_requestedConfiguration.streams.size(); ++i) {
368 halStreams[i] = getHalStream(streams[i]);
369 if (mSessionContext.initializeStream(streams[i])) {
370 ALOGV("Configured new stream: %s", streams[i].toString().c_str());
371 }
372 }
373
374 inputConfig = pickInputConfigurationForStreams(
375 streams, virtualCamera->getInputConfigs());
376 if (!inputConfig.has_value()) {
377 ALOGE(
378 "%s: Failed to pick any input configuration for stream configuration "
379 "request: %s",
380 __func__, in_requestedConfiguration.toString().c_str());
381 return cameraStatus(Status::ILLEGAL_ARGUMENT);
382 }
383
384 if (mRenderThread != nullptr) {
385 // If there's already a render thread, it means this is not a first
386 // configuration call. If the surface has the same resolution and pixel
387 // format as the picked config, we don't need to do anything, the current
388 // render thread is capable of serving new set of configuration. However
389 // if it differens, we need to discard the current surface and
390 // reinitialize the render thread.
391
392 std::optional<Resolution> currentInputResolution =
393 resolutionFromSurface(mRenderThread->getInputSurface());
394 if (currentInputResolution.has_value() &&
395 *currentInputResolution == resolutionFromInputConfig(*inputConfig)) {
396 ALOGI(
397 "%s: Newly configured set of streams matches existing client "
398 "surface (%dx%d)",
399 __func__, currentInputResolution->width,
400 currentInputResolution->height);
401 return ndk::ScopedAStatus::ok();
402 }
403
404 if (mVirtualCameraClientCallback != nullptr) {
405 mVirtualCameraClientCallback->onStreamClosed(mCurrentInputStreamId);
406 }
407
408 ALOGV(
409 "%s: Newly requested output streams are not suitable for "
410 "pre-existing surface (%dx%d), creating new surface (%dx%d)",
411 __func__, currentInputResolution->width,
412 currentInputResolution->height, inputConfig->width,
413 inputConfig->height);
414
415 mRenderThread->flush();
416 mRenderThread->stop();
417 }
418
419 mRenderThread = std::make_unique<VirtualCameraRenderThread>(
420 mSessionContext, resolutionFromInputConfig(*inputConfig),
421 virtualCamera->getMaxInputResolution(), mCameraDeviceCallback);
422 mRenderThread->start();
423 inputSurface = mRenderThread->getInputSurface();
424 inputStreamId = mCurrentInputStreamId =
425 virtualCamera->allocateInputStreamId();
426 }
427
428 if (mVirtualCameraClientCallback != nullptr && inputSurface != nullptr) {
429 // TODO(b/301023410) Pass streamId based on client input stream id once
430 // support for multiple input streams is implemented. For now we always
431 // create single texture.
432 mVirtualCameraClientCallback->onStreamConfigured(
433 inputStreamId, aidl::android::view::Surface(inputSurface.get()),
434 inputConfig->width, inputConfig->height, inputConfig->pixelFormat);
435 }
436
437 return ndk::ScopedAStatus::ok();
438 }
439
constructDefaultRequestSettings(RequestTemplate in_type,CameraMetadata * _aidl_return)440 ndk::ScopedAStatus VirtualCameraSession::constructDefaultRequestSettings(
441 RequestTemplate in_type, CameraMetadata* _aidl_return) {
442 ALOGV("%s: type %d", __func__, static_cast<int32_t>(in_type));
443
444 std::shared_ptr<VirtualCameraDevice> camera = mCameraDevice.lock();
445 if (camera == nullptr) {
446 ALOGW(
447 "%s: constructDefaultRequestSettings called on already unregistered "
448 "camera",
449 __func__);
450 return cameraStatus(Status::CAMERA_DISCONNECTED);
451 }
452
453 switch (in_type) {
454 case RequestTemplate::PREVIEW:
455 case RequestTemplate::STILL_CAPTURE:
456 case RequestTemplate::VIDEO_RECORD:
457 case RequestTemplate::VIDEO_SNAPSHOT: {
458 *_aidl_return =
459 createDefaultRequestSettings(in_type, camera->getInputConfigs());
460 return ndk::ScopedAStatus::ok();
461 }
462 case RequestTemplate::MANUAL:
463 case RequestTemplate::ZERO_SHUTTER_LAG:
464 // Don't support VIDEO_SNAPSHOT, MANUAL, ZSL templates
465 return ndk::ScopedAStatus::fromServiceSpecificError(
466 static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
467 ;
468 default:
469 ALOGE("%s: unknown request template type %d", __FUNCTION__,
470 static_cast<int>(in_type));
471 return ndk::ScopedAStatus::fromServiceSpecificError(
472 static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
473 ;
474 }
475 }
476
flush()477 ndk::ScopedAStatus VirtualCameraSession::flush() {
478 ALOGV("%s", __func__);
479 std::lock_guard<std::mutex> lock(mLock);
480 if (mRenderThread != nullptr) {
481 mRenderThread->flush();
482 }
483 return ndk::ScopedAStatus::ok();
484 }
485
getCaptureRequestMetadataQueue(MQDescriptor<int8_t,SynchronizedReadWrite> * _aidl_return)486 ndk::ScopedAStatus VirtualCameraSession::getCaptureRequestMetadataQueue(
487 MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
488 ALOGV("%s", __func__);
489 *_aidl_return = mRequestMetadataQueue->dupeDesc();
490 return ndk::ScopedAStatus::ok();
491 }
492
getCaptureResultMetadataQueue(MQDescriptor<int8_t,SynchronizedReadWrite> * _aidl_return)493 ndk::ScopedAStatus VirtualCameraSession::getCaptureResultMetadataQueue(
494 MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
495 ALOGV("%s", __func__);
496 *_aidl_return = mResultMetadataQueue->dupeDesc();
497 return ndk::ScopedAStatus::ok();
498 }
499
isReconfigurationRequired(const CameraMetadata & in_oldSessionParams,const CameraMetadata & in_newSessionParams,bool * _aidl_return)500 ndk::ScopedAStatus VirtualCameraSession::isReconfigurationRequired(
501 const CameraMetadata& in_oldSessionParams,
502 const CameraMetadata& in_newSessionParams, bool* _aidl_return) {
503 ALOGV("%s: oldSessionParams: %s newSessionParams: %s", __func__,
504 in_newSessionParams.toString().c_str(),
505 in_oldSessionParams.toString().c_str());
506
507 if (_aidl_return == nullptr) {
508 return ndk::ScopedAStatus::fromServiceSpecificError(
509 static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
510 }
511
512 *_aidl_return = true;
513 return ndk::ScopedAStatus::ok();
514 }
515
processCaptureRequest(const std::vector<CaptureRequest> & in_requests,const std::vector<BufferCache> & in_cachesToRemove,int32_t * _aidl_return)516 ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
517 const std::vector<CaptureRequest>& in_requests,
518 const std::vector<BufferCache>& in_cachesToRemove, int32_t* _aidl_return) {
519 ALOGV("%s", __func__);
520
521 if (!in_cachesToRemove.empty()) {
522 mSessionContext.removeBufferCaches(in_cachesToRemove);
523 }
524
525 for (const auto& captureRequest : in_requests) {
526 auto status = processCaptureRequest(captureRequest);
527 if (!status.isOk()) {
528 return status;
529 }
530 }
531 *_aidl_return = in_requests.size();
532 return ndk::ScopedAStatus::ok();
533 }
534
signalStreamFlush(const std::vector<int32_t> & in_streamIds,int32_t in_streamConfigCounter)535 ndk::ScopedAStatus VirtualCameraSession::signalStreamFlush(
536 const std::vector<int32_t>& in_streamIds, int32_t in_streamConfigCounter) {
537 ALOGV("%s", __func__);
538
539 (void)in_streamIds;
540 (void)in_streamConfigCounter;
541 return ndk::ScopedAStatus::ok();
542 }
543
switchToOffline(const std::vector<int32_t> & in_streamsToKeep,CameraOfflineSessionInfo * out_offlineSessionInfo,std::shared_ptr<ICameraOfflineSession> * _aidl_return)544 ndk::ScopedAStatus VirtualCameraSession::switchToOffline(
545 const std::vector<int32_t>& in_streamsToKeep,
546 CameraOfflineSessionInfo* out_offlineSessionInfo,
547 std::shared_ptr<ICameraOfflineSession>* _aidl_return) {
548 ALOGV("%s", __func__);
549
550 (void)in_streamsToKeep;
551 (void)out_offlineSessionInfo;
552
553 if (_aidl_return == nullptr) {
554 return ndk::ScopedAStatus::fromServiceSpecificError(
555 static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
556 }
557
558 *_aidl_return = nullptr;
559 return cameraStatus(Status::OPERATION_NOT_SUPPORTED);
560 }
561
repeatingRequestEnd(int32_t in_frameNumber,const std::vector<int32_t> & in_streamIds)562 ndk::ScopedAStatus VirtualCameraSession::repeatingRequestEnd(
563 int32_t in_frameNumber, const std::vector<int32_t>& in_streamIds) {
564 ALOGV("%s", __func__);
565 (void)in_frameNumber;
566 (void)in_streamIds;
567 return ndk::ScopedAStatus::ok();
568 }
569
getStreamIds() const570 std::set<int> VirtualCameraSession::getStreamIds() const {
571 return mSessionContext.getStreamIds();
572 }
573
// Processes a single capture request.
//
// Under mLock: caches the request metadata (an empty settings blob reuses
// the last received metadata), converts it to RequestSettings, and snapshots
// the camera callback and current input stream id. Then, outside the lock,
// imports the request's output buffers into the session context, and finally
// (re-acquiring mLock) enqueues a ProcessCaptureRequestTask on the render
// thread. Notifies the virtual-camera client callback (if any) afterwards.
//
// Returns ILLEGAL_ARGUMENT when no metadata has ever been supplied,
// INTERNAL_ERROR when the callback/render thread is missing or buffer
// import fails; ok() otherwise.
ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
    const CaptureRequest& request) {
  ALOGV("%s: request: %s", __func__, request.toString().c_str());

  std::shared_ptr<ICameraDeviceCallback> cameraCallback = nullptr;
  RequestSettings requestSettings;
  int currentInputStreamId;
  {
    std::lock_guard<std::mutex> lock(mLock);

    // If metadata is empty, the last received metadata applies; if it's
    // non-empty, update the cached metadata.
    if (!request.settings.metadata.empty()) {
      mCurrentRequestMetadata = request.settings;
    }

    // We don't have any metadata for this request - this means we received none
    // in first request, this is an error state.
    if (mCurrentRequestMetadata.metadata.empty()) {
      return cameraStatus(Status::ILLEGAL_ARGUMENT);
    }

    requestSettings = createSettingsFromMetadata(mCurrentRequestMetadata);

    // Snapshot state needed after the lock is released.
    cameraCallback = mCameraDeviceCallback;
    currentInputStreamId = mCurrentInputStreamId;
  }

  if (cameraCallback == nullptr) {
    ALOGE(
        "%s: processCaptureRequest called, but there's no camera callback "
        "configured",
        __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  if (!mSessionContext.importBuffersFromCaptureRequest(request)) {
    ALOGE("Failed to import buffers from capture request.");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Wrap each output buffer (stream id, buffer id, acquire fence) for the
  // render thread task.
  std::vector<CaptureRequestBuffer> taskBuffers;
  taskBuffers.reserve(request.outputBuffers.size());
  for (const StreamBuffer& streamBuffer : request.outputBuffers) {
    taskBuffers.emplace_back(streamBuffer.streamId, streamBuffer.bufferId,
                             importFence(streamBuffer.acquireFence));
  }

  {
    std::lock_guard<std::mutex> lock(mLock);
    // The render thread may have been torn down since the first locked
    // section; re-check under the lock before enqueueing.
    if (mRenderThread == nullptr) {
      ALOGE(
          "%s: processCaptureRequest (frameNumber %d)called before configure "
          "(render thread not initialized)",
          __func__, request.frameNumber);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
    mRenderThread->enqueueTask(std::make_unique<ProcessCaptureRequestTask>(
        request.frameNumber, taskBuffers, requestSettings));
  }

  // Best-effort client notification; a failure is logged but does not fail
  // the capture request.
  if (mVirtualCameraClientCallback != nullptr) {
    auto status = mVirtualCameraClientCallback->onProcessCaptureRequest(
        currentInputStreamId, request.frameNumber);
    if (!status.isOk()) {
      ALOGE(
          "Failed to invoke onProcessCaptureRequest client callback for frame "
          "%d",
          request.frameNumber);
    }
  }

  return ndk::ScopedAStatus::ok();
}
648
649 } // namespace virtualcamera
650 } // namespace companion
651 } // namespace android
652