1 /*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 // #define LOG_NDEBUG 0
18 #define LOG_TAG "VirtualCameraSession"
19 #include "VirtualCameraSession.h"
20
21 #include <algorithm>
22 #include <atomic>
23 #include <chrono>
24 #include <cmath>
25 #include <cstddef>
26 #include <cstdint>
27 #include <cstring>
28 #include <map>
29 #include <memory>
30 #include <mutex>
31 #include <numeric>
32 #include <optional>
33 #include <tuple>
34 #include <unordered_set>
35 #include <utility>
36 #include <vector>
37
38 #include "CameraMetadata.h"
39 #include "EGL/egl.h"
40 #include "VirtualCameraDevice.h"
41 #include "VirtualCameraRenderThread.h"
42 #include "VirtualCameraStream.h"
43 #include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
44 #include "aidl/android/hardware/camera/common/Status.h"
45 #include "aidl/android/hardware/camera/device/BufferCache.h"
46 #include "aidl/android/hardware/camera/device/BufferStatus.h"
47 #include "aidl/android/hardware/camera/device/CameraMetadata.h"
48 #include "aidl/android/hardware/camera/device/CaptureRequest.h"
49 #include "aidl/android/hardware/camera/device/HalStream.h"
50 #include "aidl/android/hardware/camera/device/NotifyMsg.h"
51 #include "aidl/android/hardware/camera/device/RequestTemplate.h"
52 #include "aidl/android/hardware/camera/device/ShutterMsg.h"
53 #include "aidl/android/hardware/camera/device/Stream.h"
54 #include "aidl/android/hardware/camera/device/StreamBuffer.h"
55 #include "aidl/android/hardware/camera/device/StreamConfiguration.h"
56 #include "aidl/android/hardware/camera/device/StreamRotation.h"
57 #include "aidl/android/hardware/graphics/common/BufferUsage.h"
58 #include "aidl/android/hardware/graphics/common/PixelFormat.h"
59 #include "android/hardware_buffer.h"
60 #include "android/native_window_aidl.h"
61 #include "fmq/AidlMessageQueue.h"
62 #include "system/camera_metadata.h"
63 #include "ui/GraphicBuffer.h"
64 #include "util/EglDisplayContext.h"
65 #include "util/EglFramebuffer.h"
66 #include "util/EglProgram.h"
67 #include "util/JpegUtil.h"
68 #include "util/MetadataUtil.h"
69 #include "util/Util.h"
70
71 namespace android {
72 namespace companion {
73 namespace virtualcamera {
74
75 using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
76 using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
77 using ::aidl::android::hardware::camera::common::Status;
78 using ::aidl::android::hardware::camera::device::BufferCache;
79 using ::aidl::android::hardware::camera::device::CameraMetadata;
80 using ::aidl::android::hardware::camera::device::CameraOfflineSessionInfo;
81 using ::aidl::android::hardware::camera::device::CaptureRequest;
82 using ::aidl::android::hardware::camera::device::HalStream;
83 using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
84 using ::aidl::android::hardware::camera::device::ICameraOfflineSession;
85 using ::aidl::android::hardware::camera::device::RequestTemplate;
86 using ::aidl::android::hardware::camera::device::Stream;
87 using ::aidl::android::hardware::camera::device::StreamBuffer;
88 using ::aidl::android::hardware::camera::device::StreamConfiguration;
89 using ::aidl::android::hardware::common::fmq::MQDescriptor;
90 using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
91 using ::aidl::android::hardware::graphics::common::BufferUsage;
92 using ::aidl::android::hardware::graphics::common::PixelFormat;
93 using ::android::base::unique_fd;
94
95 namespace {
96
97 using namespace std::chrono_literals;
98
// Size of request/result metadata fast message queue.
// Setting this to 0 always disables FMQ (metadata is then passed through
// the regular AIDL parcel path instead).
constexpr size_t kMetadataMsgQueueSize = 0;

// Maximum number of buffers to use per single stream.
constexpr size_t kMaxStreamBuffers = 2;

// Sentinel for "no input stream currently allocated".
constexpr int kInvalidStreamId = -1;
107
requestTemplateToIntent(const RequestTemplate type)108 camera_metadata_enum_android_control_capture_intent_t requestTemplateToIntent(
109 const RequestTemplate type) {
110 switch (type) {
111 case RequestTemplate::PREVIEW:
112 return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
113 case RequestTemplate::STILL_CAPTURE:
114 return ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
115 case RequestTemplate::VIDEO_RECORD:
116 return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
117 case RequestTemplate::VIDEO_SNAPSHOT:
118 return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
119 default:
120 // Return PREVIEW by default
121 return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
122 }
123 }
124
getMaxFps(const std::vector<SupportedStreamConfiguration> & configs)125 int getMaxFps(const std::vector<SupportedStreamConfiguration>& configs) {
126 return std::transform_reduce(
127 configs.begin(), configs.end(), 0,
128 [](const int a, const int b) { return std::max(a, b); },
129 [](const SupportedStreamConfiguration& config) { return config.maxFps; });
130 }
131
// Builds the default request settings metadata for the given request template.
//
// The AE target FPS range is pinned to the maximum FPS supported by any of
// the camera's input configurations. Returns an empty CameraMetadata when
// the builder fails to serialize the settings.
CameraMetadata createDefaultRequestSettings(
    const RequestTemplate type,
    const std::vector<SupportedStreamConfiguration>& inputConfigs) {
  int maxFps = getMaxFps(inputConfigs);
  auto metadata =
      MetadataBuilder()
          .setAberrationCorrectionMode(
              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
          .setControlCaptureIntent(requestTemplateToIntent(type))
          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
          .setControlAeExposureCompensation(0)
          .setControlAeTargetFpsRange(FpsRange{maxFps, maxFps})
          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
          .setControlAePrecaptureTrigger(
              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
          .setFlashMode(ANDROID_FLASH_MODE_OFF)
          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
          .setJpegQuality(VirtualCameraDevice::kDefaultJpegQuality)
          .setJpegThumbnailQuality(VirtualCameraDevice::kDefaultJpegQuality)
          .setJpegThumbnailSize(0, 0)
          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
          .build();
  if (metadata == nullptr) {
    ALOGE("%s: Failed to construct metadata for default request type %s",
          __func__, toString(type).c_str());
    return CameraMetadata();
  } else {
    ALOGV("%s: Successfully created metadata for request type %s", __func__,
          toString(type).c_str());
  }
  return *metadata;
}
170
getHalStream(const Stream & stream)171 HalStream getHalStream(const Stream& stream) {
172 HalStream halStream;
173 halStream.id = stream.id;
174 halStream.physicalCameraId = stream.physicalCameraId;
175 halStream.maxBuffers = kMaxStreamBuffers;
176
177 if (stream.format == PixelFormat::IMPLEMENTATION_DEFINED) {
178 // If format is implementation defined we need it to override
179 // it with actual format.
180 // TODO(b/301023410) Override with the format based on the
181 // camera configuration, once we support more formats.
182 halStream.overrideFormat = PixelFormat::YCBCR_420_888;
183 } else {
184 halStream.overrideFormat = stream.format;
185 }
186 halStream.overrideDataSpace = stream.dataSpace;
187
188 halStream.producerUsage = static_cast<BufferUsage>(
189 static_cast<int64_t>(stream.usage) |
190 static_cast<int64_t>(BufferUsage::CAMERA_OUTPUT) |
191 static_cast<int64_t>(BufferUsage::GPU_RENDER_TARGET));
192
193 halStream.supportOffline = false;
194 return halStream;
195 }
196
getHighestResolutionStream(const std::vector<Stream> & streams)197 Stream getHighestResolutionStream(const std::vector<Stream>& streams) {
198 return *(std::max_element(streams.begin(), streams.end(),
199 [](const Stream& a, const Stream& b) {
200 return a.width * a.height < b.width * b.height;
201 }));
202 }
203
// Extracts the (width, height) resolution of a requested output stream.
Resolution resolutionFromStream(const Stream& stream) {
  return Resolution(stream.width, stream.height);
}
207
// Extracts the (width, height) resolution of a supported input configuration.
Resolution resolutionFromInputConfig(
    const SupportedStreamConfiguration& inputConfig) {
  return Resolution(inputConfig.width, inputConfig.height);
}
212
resolutionFromSurface(const sp<Surface> surface)213 std::optional<Resolution> resolutionFromSurface(const sp<Surface> surface) {
214 Resolution res{0, 0};
215 if (surface == nullptr) {
216 ALOGE("%s: Cannot get resolution from null surface", __func__);
217 return std::nullopt;
218 }
219
220 int status = surface->query(NATIVE_WINDOW_WIDTH, &res.width);
221 if (status != NO_ERROR) {
222 ALOGE("%s: Failed to get width from surface", __func__);
223 return std::nullopt;
224 }
225
226 status = surface->query(NATIVE_WINDOW_HEIGHT, &res.height);
227 if (status != NO_ERROR) {
228 ALOGE("%s: Failed to get height from surface", __func__);
229 return std::nullopt;
230 }
231 return res;
232 }
233
pickInputConfigurationForStreams(const std::vector<Stream> & requestedStreams,const std::vector<SupportedStreamConfiguration> & supportedInputConfigs)234 std::optional<SupportedStreamConfiguration> pickInputConfigurationForStreams(
235 const std::vector<Stream>& requestedStreams,
236 const std::vector<SupportedStreamConfiguration>& supportedInputConfigs) {
237 Stream maxResolutionStream = getHighestResolutionStream(requestedStreams);
238 Resolution maxResolution = resolutionFromStream(maxResolutionStream);
239
240 // Find best fitting stream to satisfy all requested streams:
241 // Best fitting => same or higher resolution as input with lowest pixel count
242 // difference and same aspect ratio.
243 auto isBetterInputConfig = [maxResolution](
244 const SupportedStreamConfiguration& configA,
245 const SupportedStreamConfiguration& configB) {
246 int maxResPixelCount = maxResolution.width * maxResolution.height;
247 int pixelCountDiffA =
248 std::abs((configA.width * configA.height) - maxResPixelCount);
249 int pixelCountDiffB =
250 std::abs((configB.width * configB.height) - maxResPixelCount);
251
252 return pixelCountDiffA < pixelCountDiffB;
253 };
254
255 std::optional<SupportedStreamConfiguration> bestConfig;
256 for (const SupportedStreamConfiguration& inputConfig : supportedInputConfigs) {
257 Resolution inputConfigResolution = resolutionFromInputConfig(inputConfig);
258 if (inputConfigResolution < maxResolution ||
259 !isApproximatellySameAspectRatio(inputConfigResolution, maxResolution)) {
260 // We don't want to upscale from lower resolution, or use different aspect
261 // ratio, skip.
262 continue;
263 }
264
265 if (!bestConfig.has_value() ||
266 isBetterInputConfig(inputConfig, bestConfig.value())) {
267 bestConfig = inputConfig;
268 }
269 }
270
271 return bestConfig;
272 }
273
// Translates capture-request metadata into a RequestSettings struct consumed
// by the render thread. Missing JPEG entries fall back to sensible defaults;
// optional entries (fps range, GPS, precapture trigger) stay unset when absent.
RequestSettings createSettingsFromMetadata(const CameraMetadata& metadata) {
  return RequestSettings{
      .jpegQuality = getJpegQuality(metadata).value_or(
          VirtualCameraDevice::kDefaultJpegQuality),
      .jpegOrientation = getJpegOrientation(metadata),
      .thumbnailResolution =
          getJpegThumbnailSize(metadata).value_or(Resolution(0, 0)),
      .thumbnailJpegQuality = getJpegThumbnailQuality(metadata).value_or(
          VirtualCameraDevice::kDefaultJpegQuality),
      .fpsRange = getFpsRange(metadata),
      .captureIntent = getCaptureIntent(metadata).value_or(
          ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW),
      .gpsCoordinates = getGpsCoordinates(metadata),
      .aePrecaptureTrigger = getPrecaptureTrigger(metadata)};
}
289
290 }; // namespace
291
// Constructs the session and eagerly creates the request/result metadata
// FMQs. With kMetadataMsgQueueSize == 0 the queues are effectively disabled;
// an invalid queue is only logged, not treated as fatal.
VirtualCameraSession::VirtualCameraSession(
    std::shared_ptr<VirtualCameraDevice> cameraDevice,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback,
    std::shared_ptr<IVirtualCameraCallback> virtualCameraClientCallback)
    : mCameraDevice(cameraDevice),
      mCameraDeviceCallback(cameraDeviceCallback),
      mVirtualCameraClientCallback(virtualCameraClientCallback),
      mCurrentInputStreamId(kInvalidStreamId) {
  mRequestMetadataQueue = std::make_unique<RequestMetadataQueue>(
      kMetadataMsgQueueSize, false /* non blocking */);
  if (!mRequestMetadataQueue->isValid()) {
    ALOGE("%s: invalid request fmq", __func__);
  }

  mResultMetadataQueue = std::make_shared<ResultMetadataQueue>(
      kMetadataMsgQueueSize, false /* non blocking */);
  if (!mResultMetadataQueue->isValid()) {
    ALOGE("%s: invalid result fmq", __func__);
  }
}
312
// Shuts the session down: stops the render thread (flushing pending work
// first), notifies the client that its input stream is closed, and releases
// all stream resources. closeAllStreams() is intentionally called outside
// the lock scope.
ndk::ScopedAStatus VirtualCameraSession::close() {
  ALOGV("%s", __func__);
  {
    std::lock_guard<std::mutex> lock(mLock);

    if (mRenderThread != nullptr) {
      mRenderThread->flush();
      mRenderThread->stop();
      mRenderThread = nullptr;

      if (mVirtualCameraClientCallback != nullptr) {
        mVirtualCameraClientCallback->onStreamClosed(mCurrentInputStreamId);
      }
      mCurrentInputStreamId = kInvalidStreamId;
    }
  }

  mSessionContext.closeAllStreams();
  return ndk::ScopedAStatus::ok();
}
333
configureStreams(const StreamConfiguration & in_requestedConfiguration,std::vector<HalStream> * _aidl_return)334 ndk::ScopedAStatus VirtualCameraSession::configureStreams(
335 const StreamConfiguration& in_requestedConfiguration,
336 std::vector<HalStream>* _aidl_return) {
337 ALOGV("%s: requestedConfiguration: %s", __func__,
338 in_requestedConfiguration.toString().c_str());
339
340 if (_aidl_return == nullptr) {
341 return cameraStatus(Status::ILLEGAL_ARGUMENT);
342 }
343
344 std::shared_ptr<VirtualCameraDevice> virtualCamera = mCameraDevice.lock();
345 if (virtualCamera == nullptr) {
346 ALOGW("%s: configure called on already unregistered camera", __func__);
347 return cameraStatus(Status::CAMERA_DISCONNECTED);
348 }
349
350 mSessionContext.removeStreamsNotInStreamConfiguration(
351 in_requestedConfiguration);
352
353 auto& streams = in_requestedConfiguration.streams;
354 auto& halStreams = *_aidl_return;
355 halStreams.clear();
356 halStreams.resize(in_requestedConfiguration.streams.size());
357
358 if (!virtualCamera->isStreamCombinationSupported(in_requestedConfiguration)) {
359 ALOGE(
360 "%s: Requested stream configuration is not supported, closing existing "
361 "session",
362 __func__);
363 close();
364 return cameraStatus(Status::ILLEGAL_ARGUMENT);
365 }
366
367 sp<Surface> inputSurface = nullptr;
368 int inputStreamId = -1;
369 std::optional<SupportedStreamConfiguration> inputConfig;
370 {
371 std::lock_guard<std::mutex> lock(mLock);
372 for (int i = 0; i < in_requestedConfiguration.streams.size(); ++i) {
373 halStreams[i] = getHalStream(streams[i]);
374 if (mSessionContext.initializeStream(streams[i])) {
375 ALOGV("Configured new stream: %s", streams[i].toString().c_str());
376 }
377 }
378
379 inputConfig = pickInputConfigurationForStreams(
380 streams, virtualCamera->getInputConfigs());
381 if (!inputConfig.has_value()) {
382 ALOGE(
383 "%s: Failed to pick any input configuration for stream configuration "
384 "request: %s",
385 __func__, in_requestedConfiguration.toString().c_str());
386 return cameraStatus(Status::ILLEGAL_ARGUMENT);
387 }
388
389 if (mRenderThread != nullptr) {
390 // If there's already a render thread, it means this is not a first
391 // configuration call. If the surface has the same resolution and pixel
392 // format as the picked config, we don't need to do anything, the current
393 // render thread is capable of serving new set of configuration. However
394 // if it differens, we need to discard the current surface and
395 // reinitialize the render thread.
396
397 std::optional<Resolution> currentInputResolution =
398 resolutionFromSurface(mRenderThread->getInputSurface());
399 if (currentInputResolution.has_value() &&
400 *currentInputResolution == resolutionFromInputConfig(*inputConfig)) {
401 ALOGI(
402 "%s: Newly configured set of streams matches existing client "
403 "surface (%dx%d)",
404 __func__, currentInputResolution->width,
405 currentInputResolution->height);
406 return ndk::ScopedAStatus::ok();
407 }
408
409 if (mVirtualCameraClientCallback != nullptr) {
410 mVirtualCameraClientCallback->onStreamClosed(mCurrentInputStreamId);
411 }
412
413 ALOGV(
414 "%s: Newly requested output streams are not suitable for "
415 "pre-existing surface (%dx%d), creating new surface (%dx%d)",
416 __func__, currentInputResolution->width,
417 currentInputResolution->height, inputConfig->width,
418 inputConfig->height);
419
420 mRenderThread->flush();
421 mRenderThread->stop();
422 }
423
424 mRenderThread = std::make_unique<VirtualCameraRenderThread>(
425 mSessionContext, resolutionFromInputConfig(*inputConfig),
426 virtualCamera->getMaxInputResolution(), mCameraDeviceCallback);
427 mRenderThread->start();
428 inputSurface = mRenderThread->getInputSurface();
429 inputStreamId = mCurrentInputStreamId =
430 virtualCamera->allocateInputStreamId();
431 }
432
433 if (mVirtualCameraClientCallback != nullptr && inputSurface != nullptr) {
434 // TODO(b/301023410) Pass streamId based on client input stream id once
435 // support for multiple input streams is implemented. For now we always
436 // create single texture.
437 mVirtualCameraClientCallback->onStreamConfigured(
438 inputStreamId, aidl::android::view::Surface(inputSurface.get()),
439 inputConfig->width, inputConfig->height, inputConfig->pixelFormat);
440 }
441
442 return ndk::ScopedAStatus::ok();
443 }
444
// Returns the default request settings for the given template.
//
// PREVIEW, STILL_CAPTURE, VIDEO_RECORD and VIDEO_SNAPSHOT are supported;
// MANUAL and ZERO_SHUTTER_LAG (and unknown values) yield ILLEGAL_ARGUMENT.
ndk::ScopedAStatus VirtualCameraSession::constructDefaultRequestSettings(
    RequestTemplate in_type, CameraMetadata* _aidl_return) {
  ALOGV("%s: type %d", __func__, static_cast<int32_t>(in_type));

  std::shared_ptr<VirtualCameraDevice> camera = mCameraDevice.lock();
  if (camera == nullptr) {
    ALOGW(
        "%s: constructDefaultRequestSettings called on already unregistered "
        "camera",
        __func__);
    return cameraStatus(Status::CAMERA_DISCONNECTED);
  }

  switch (in_type) {
    case RequestTemplate::PREVIEW:
    case RequestTemplate::STILL_CAPTURE:
    case RequestTemplate::VIDEO_RECORD:
    case RequestTemplate::VIDEO_SNAPSHOT: {
      *_aidl_return =
          createDefaultRequestSettings(in_type, camera->getInputConfigs());
      return ndk::ScopedAStatus::ok();
    }
    case RequestTemplate::MANUAL:
    case RequestTemplate::ZERO_SHUTTER_LAG:
      // Don't support MANUAL, ZSL templates.
      return ndk::ScopedAStatus::fromServiceSpecificError(
          static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
    default:
      ALOGE("%s: unknown request template type %d", __FUNCTION__,
            static_cast<int>(in_type));
      return ndk::ScopedAStatus::fromServiceSpecificError(
          static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }
}
479
flush()480 ndk::ScopedAStatus VirtualCameraSession::flush() {
481 ALOGV("%s", __func__);
482 std::lock_guard<std::mutex> lock(mLock);
483 if (mRenderThread != nullptr) {
484 mRenderThread->flush();
485 }
486 return ndk::ScopedAStatus::ok();
487 }
488
getCaptureRequestMetadataQueue(MQDescriptor<int8_t,SynchronizedReadWrite> * _aidl_return)489 ndk::ScopedAStatus VirtualCameraSession::getCaptureRequestMetadataQueue(
490 MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
491 ALOGV("%s", __func__);
492 *_aidl_return = mRequestMetadataQueue->dupeDesc();
493 return ndk::ScopedAStatus::ok();
494 }
495
getCaptureResultMetadataQueue(MQDescriptor<int8_t,SynchronizedReadWrite> * _aidl_return)496 ndk::ScopedAStatus VirtualCameraSession::getCaptureResultMetadataQueue(
497 MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
498 ALOGV("%s", __func__);
499 *_aidl_return = mResultMetadataQueue->dupeDesc();
500 return ndk::ScopedAStatus::ok();
501 }
502
isReconfigurationRequired(const CameraMetadata & in_oldSessionParams,const CameraMetadata & in_newSessionParams,bool * _aidl_return)503 ndk::ScopedAStatus VirtualCameraSession::isReconfigurationRequired(
504 const CameraMetadata& in_oldSessionParams,
505 const CameraMetadata& in_newSessionParams, bool* _aidl_return) {
506 ALOGV("%s: oldSessionParams: %s newSessionParams: %s", __func__,
507 in_newSessionParams.toString().c_str(),
508 in_oldSessionParams.toString().c_str());
509
510 if (_aidl_return == nullptr) {
511 return ndk::ScopedAStatus::fromServiceSpecificError(
512 static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
513 }
514
515 *_aidl_return = true;
516 return ndk::ScopedAStatus::ok();
517 }
518
processCaptureRequest(const std::vector<CaptureRequest> & in_requests,const std::vector<BufferCache> & in_cachesToRemove,int32_t * _aidl_return)519 ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
520 const std::vector<CaptureRequest>& in_requests,
521 const std::vector<BufferCache>& in_cachesToRemove, int32_t* _aidl_return) {
522 ALOGV("%s: request count: %zu", __func__, in_requests.size());
523
524 if (!in_cachesToRemove.empty()) {
525 mSessionContext.removeBufferCaches(in_cachesToRemove);
526 }
527
528 for (const auto& captureRequest : in_requests) {
529 auto status = processCaptureRequest(captureRequest);
530 if (!status.isOk()) {
531 return status;
532 }
533 }
534 *_aidl_return = in_requests.size();
535 return ndk::ScopedAStatus::ok();
536 }
537
signalStreamFlush(const std::vector<int32_t> & in_streamIds,int32_t in_streamConfigCounter)538 ndk::ScopedAStatus VirtualCameraSession::signalStreamFlush(
539 const std::vector<int32_t>& in_streamIds, int32_t in_streamConfigCounter) {
540 ALOGV("%s", __func__);
541
542 (void)in_streamIds;
543 (void)in_streamConfigCounter;
544 return ndk::ScopedAStatus::ok();
545 }
546
switchToOffline(const std::vector<int32_t> & in_streamsToKeep,CameraOfflineSessionInfo * out_offlineSessionInfo,std::shared_ptr<ICameraOfflineSession> * _aidl_return)547 ndk::ScopedAStatus VirtualCameraSession::switchToOffline(
548 const std::vector<int32_t>& in_streamsToKeep,
549 CameraOfflineSessionInfo* out_offlineSessionInfo,
550 std::shared_ptr<ICameraOfflineSession>* _aidl_return) {
551 ALOGV("%s", __func__);
552
553 (void)in_streamsToKeep;
554 (void)out_offlineSessionInfo;
555
556 if (_aidl_return == nullptr) {
557 return ndk::ScopedAStatus::fromServiceSpecificError(
558 static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
559 }
560
561 *_aidl_return = nullptr;
562 return cameraStatus(Status::OPERATION_NOT_SUPPORTED);
563 }
564
repeatingRequestEnd(int32_t in_frameNumber,const std::vector<int32_t> & in_streamIds)565 ndk::ScopedAStatus VirtualCameraSession::repeatingRequestEnd(
566 int32_t in_frameNumber, const std::vector<int32_t>& in_streamIds) {
567 ALOGV("%s", __func__);
568 (void)in_frameNumber;
569 (void)in_streamIds;
570 return ndk::ScopedAStatus::ok();
571 }
572
// Returns the ids of all streams currently tracked by the session context.
std::set<int> VirtualCameraSession::getStreamIds() const {
  return mSessionContext.getStreamIds();
}
576
processCaptureRequest(const CaptureRequest & request)577 ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
578 const CaptureRequest& request) {
579 ALOGV("%s: CaptureRequest { frameNumber:%d }", __func__, request.frameNumber);
580
581 std::shared_ptr<ICameraDeviceCallback> cameraCallback = nullptr;
582 RequestSettings requestSettings;
583 int currentInputStreamId;
584 {
585 std::lock_guard<std::mutex> lock(mLock);
586
587 // If metadata it empty, last received metadata applies, if it's non-empty
588 // update it.
589 if (!request.settings.metadata.empty()) {
590 mCurrentRequestMetadata = request.settings;
591 }
592
593 // We don't have any metadata for this request - this means we received none
594 // in first request, this is an error state.
595 if (mCurrentRequestMetadata.metadata.empty()) {
596 return cameraStatus(Status::ILLEGAL_ARGUMENT);
597 }
598
599 requestSettings = createSettingsFromMetadata(mCurrentRequestMetadata);
600
601 cameraCallback = mCameraDeviceCallback;
602 currentInputStreamId = mCurrentInputStreamId;
603 }
604
605 if (cameraCallback == nullptr) {
606 ALOGE(
607 "%s: processCaptureRequest called, but there's no camera callback "
608 "configured",
609 __func__);
610 return cameraStatus(Status::INTERNAL_ERROR);
611 }
612
613 if (!mSessionContext.importBuffersFromCaptureRequest(request)) {
614 ALOGE("Failed to import buffers from capture request.");
615 return cameraStatus(Status::INTERNAL_ERROR);
616 }
617
618 std::vector<CaptureRequestBuffer> taskBuffers;
619 taskBuffers.reserve(request.outputBuffers.size());
620 for (const StreamBuffer& streamBuffer : request.outputBuffers) {
621 taskBuffers.emplace_back(streamBuffer.streamId, streamBuffer.bufferId,
622 importFence(streamBuffer.acquireFence));
623 }
624
625 {
626 std::lock_guard<std::mutex> lock(mLock);
627 if (mRenderThread == nullptr) {
628 ALOGE(
629 "%s: processCaptureRequest (frameNumber %d)called before configure "
630 "(render thread not initialized)",
631 __func__, request.frameNumber);
632 return cameraStatus(Status::INTERNAL_ERROR);
633 }
634 mRenderThread->enqueueTask(std::make_unique<ProcessCaptureRequestTask>(
635 request.frameNumber, taskBuffers, requestSettings));
636 }
637
638 if (mVirtualCameraClientCallback != nullptr) {
639 auto status = mVirtualCameraClientCallback->onProcessCaptureRequest(
640 currentInputStreamId, request.frameNumber);
641 if (!status.isOk()) {
642 ALOGE(
643 "Failed to invoke onProcessCaptureRequest client callback for frame "
644 "%d",
645 request.frameNumber);
646 }
647 }
648
649 return ndk::ScopedAStatus::ok();
650 }
651
652 } // namespace virtualcamera
653 } // namespace companion
654 } // namespace android
655