/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "VirtualCameraRenderThread"
#include "VirtualCameraRenderThread.h"

#include <chrono>
#include <cinttypes>
#include <cstdint>
#include <cstring>
#include <future>
#include <memory>
#include <mutex>
#include <thread>
#include <utility>
#include <vector>

#include "Exif.h"
#include "GLES/gl.h"
#include "VirtualCameraDevice.h"
#include "VirtualCameraSessionContext.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
#include "aidl/android/hardware/camera/device/CameraBlob.h"
#include "aidl/android/hardware/camera/device/CameraBlobId.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/CaptureResult.h"
#include "aidl/android/hardware/camera/device/ErrorCode.h"
#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
#include "aidl/android/hardware/camera/device/NotifyMsg.h"
#include "aidl/android/hardware/camera/device/ShutterMsg.h"
#include "aidl/android/hardware/camera/device/StreamBuffer.h"
#include "android-base/thread_annotations.h"
#include "android/binder_auto_utils.h"
#include "android/hardware_buffer.h"
#include "hardware/gralloc.h"
#include "system/camera_metadata.h"
#include "ui/GraphicBuffer.h"
#include "ui/Rect.h"
#include "util/EglFramebuffer.h"
#include "util/JpegUtil.h"
#include "util/MetadataUtil.h"
#include "util/Util.h"
#include "utils/Errors.h"

namespace android {
namespace companion {
namespace virtualcamera {

using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BufferStatus;
using ::aidl::android::hardware::camera::device::CameraBlob;
using ::aidl::android::hardware::camera::device::CameraBlobId;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::CaptureResult;
using ::aidl::android::hardware::camera::device::ErrorCode;
using ::aidl::android::hardware::camera::device::ErrorMsg;
using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
using ::aidl::android::hardware::camera::device::NotifyMsg;
using ::aidl::android::hardware::camera::device::ShutterMsg;
using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::camera::device::StreamBuffer;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::android::base::ScopedLockAssertion;

using ::android::hardware::camera::common::helper::ExifUtils;

namespace {

// helper type for the visitor
template <class... Ts>
struct overloaded : Ts... {
  using Ts::operator()...;
};
// explicit deduction guide (not needed as of C++20)
template <class... Ts>
overloaded(Ts...) -> overloaded<Ts...>;

using namespace std::chrono_literals;

static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;

// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
// This roughly corresponds to frame latency; we set it to the documented
// minimum of 2.
static constexpr uint8_t kPipelineDepth = 2;

static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB

static constexpr UpdateTextureTask kUpdateTextureTask;

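// Builds the capture result metadata for a completed frame. Most values are
// fixed, since the virtual camera has no real 3A; the timestamp and the
// request-dependent keys (capture intent, JPEG settings, FPS range, GPS) are
// filled from the arguments.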
CameraMetadata createCaptureResultMetadata(
    const std::chrono::nanoseconds timestamp,
    const RequestSettings& requestSettings,
    const Resolution reportedSensorSize) {
  // All of the keys used in the response need to be referenced in
  // availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
  // in VirtualCameraDevice.cc).
  MetadataBuilder builder =
      MetadataBuilder()
          .setAberrationCorrectionMode(
              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
          .setControlAeAvailableAntibandingModes(
              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
          .setControlAeExposureCompensation(0)
          .setControlAeLockAvailable(false)
          .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
          .setControlAePrecaptureTrigger(
              // Limited devices are expected to have AE precapture enabled
              // and respond to cancellation requests. Since we don't actually
              // support AE at all, let's just respect the cancellation
              // expectation in case it's requested.
              requestSettings.aePrecaptureTrigger ==
                      ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
                  ? ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
                  : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
          .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
          .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
          .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
          .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
          .setControlCaptureIntent(requestSettings.captureIntent)
          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
          .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
          .setControlVideoStabilizationMode(
              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
          .setCropRegion(0, 0, reportedSensorSize.width,
                         reportedSensorSize.height)
          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
          .setFlashMode(ANDROID_FLASH_MODE_OFF)
          .setFocalLength(VirtualCameraDevice::kFocalLength)
          .setJpegQuality(requestSettings.jpegQuality)
          .setJpegOrientation(requestSettings.jpegOrientation)
          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
                                requestSettings.thumbnailResolution.height)
          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
          .setLensOpticalStabilizationMode(
              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
          .setPipelineDepth(kPipelineDepth)
          .setSensorTimestamp(timestamp)
          .setStatisticsHotPixelMapMode(
              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
          .setStatisticsLensShadingMapMode(
              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
          .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);

  if (requestSettings.fpsRange.has_value()) {
    builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
  }

  if (requestSettings.gpsCoordinates.has_value()) {
    const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
    builder.setJpegGpsCoordinates(coordinates);
  }

  std::unique_ptr<CameraMetadata> metadata = builder.build();

  if (metadata == nullptr) {
    ALOGE("%s: Failed to build capture result metadata", __func__);
    return CameraMetadata();
  }
  return std::move(*metadata);
}

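// Creates a NotifyMsg with a ShutterMsg announcing the start of exposure for
// the given frame number.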
NotifyMsg createShutterNotifyMsg(int frameNumber,
                                 std::chrono::nanoseconds timestamp) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
      .frameNumber = frameNumber,
      .timestamp = timestamp.count(),
  });
  return msg;
}

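// Creates a NotifyMsg reporting ERROR_BUFFER for a single output buffer of
// the given stream in the given frame.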
NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
                                          .errorStreamId = streamId,
                                          .errorCode = ErrorCode::ERROR_BUFFER});
  return msg;
}

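// Creates a NotifyMsg reporting ERROR_REQUEST, i.e. failure of the whole
// capture request rather than of an individual stream.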
NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::error>(ErrorMsg{
      .frameNumber = frameNumber,
      // errorStreamId needs to be set to -1 for ERROR_REQUEST
      // (not tied to specific stream).
      .errorStreamId = -1,
      .errorCode = ErrorCode::ERROR_REQUEST});
  return msg;
}

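// Allocates a YUV420 AHardwareBuffer usable both as a GPU render target and
// for CPU reads, and wraps it in an EglFrameBuffer. Used as a scratch buffer
// for JPEG compression. Returns nullptr on allocation failure.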
std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
    EGLDisplay eglDisplay, const int width, const int height) {
  const AHardwareBuffer_Desc desc{
      .width = static_cast<uint32_t>(width),
      .height = static_cast<uint32_t>(height),
      .layers = 1,
      .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
      .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
               AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
      .rfu0 = 0,
      .rfu1 = 0};

  AHardwareBuffer* hwBufferPtr;
  int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
  if (status != NO_ERROR) {
    ALOGE(
        "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
        __func__, status);
    return nullptr;
  }

  return std::make_shared<EglFrameBuffer>(
      eglDisplay,
      std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
}

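// Returns true if the pixel format should be sampled as YUV (as opposed to
// RGBA) when drawing the input texture.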
bool isYuvFormat(const PixelFormat pixelFormat) {
  switch (static_cast<android_pixel_format_t>(pixelFormat)) {
    case HAL_PIXEL_FORMAT_YCBCR_422_I:
    case HAL_PIXEL_FORMAT_YCBCR_422_SP:
    case HAL_PIXEL_FORMAT_Y16:
    case HAL_PIXEL_FORMAT_YV12:
    case HAL_PIXEL_FORMAT_YCBCR_420_888:
      return true;
    default:
      return false;
  }
}

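// Serializes the EXIF APP1 segment for a JPEG capture, populated from the
// result metadata and the configured make/model, optionally embedding an
// already-compressed thumbnail. Returns an empty vector on failure.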
std::vector<uint8_t> createExif(
    Resolution imageSize, const CameraMetadata resultMetadata,
    const std::vector<uint8_t>& compressedThumbnail = {}) {
  std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
  exifUtils->initialize();

  // Make a copy of the metadata in order to convert it to the HAL metadata
  // format (as opposed to the AIDL class) and use the setFromMetadata method
  // from ExifUtils.
  camera_metadata_t* rawSettings =
      clone_camera_metadata((camera_metadata_t*)resultMetadata.metadata.data());
  if (rawSettings != nullptr) {
    android::hardware::camera::common::helper::CameraMetadata halMetadata(
        rawSettings);
    exifUtils->setFromMetadata(halMetadata, imageSize.width, imageSize.height);
  }
  exifUtils->setMake(VirtualCameraDevice::kDefaultMakeAndModel);
  exifUtils->setModel(VirtualCameraDevice::kDefaultMakeAndModel);
  exifUtils->setFlash(0);

  std::vector<uint8_t> app1Data;

  size_t thumbnailDataSize = compressedThumbnail.size();
  const void* thumbnailData =
      thumbnailDataSize > 0
          ? reinterpret_cast<const void*>(compressedThumbnail.data())
          : nullptr;

  if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
    ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
    return app1Data;
  }

  const uint8_t* data = exifUtils->getApp1Buffer();
  const size_t size = exifUtils->getApp1Length();

  app1Data.insert(app1Data.end(), data, data + size);
  return app1Data;
}

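// Returns the longest time we can wait for a new input frame, derived from
// the minimum FPS of the request (or the device-wide minimum when the request
// carries no FPS range).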
std::chrono::nanoseconds getMaxFrameDuration(
    const RequestSettings& requestSettings) {
  if (requestSettings.fpsRange.has_value()) {
    return std::chrono::nanoseconds(static_cast<uint64_t>(
        1e9 / std::max(1, requestSettings.fpsRange->minFps)));
  }
  return std::chrono::nanoseconds(
      static_cast<uint64_t>(1e9 / VirtualCameraDevice::kMinFps));
}

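// Forwards onFrameAvailable events from the input surface to an arbitrary
// callback; used below to wake the render thread when a new frame arrives.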
class FrameAvailableListenerProxy : public ConsumerBase::FrameAvailableListener {
 public:
  FrameAvailableListenerProxy(std::function<void()> callback)
      : mOnFrameAvailableCallback(callback) {
  }

  virtual void onFrameAvailable(const BufferItem&) override {
    ALOGV("%s: onFrameAvailable", __func__);
    mOnFrameAvailableCallback();
  }

 private:
  std::function<void()> mOnFrameAvailableCallback;
};

}  // namespace

CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
                                           sp<Fence> fence)
    : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
}

int CaptureRequestBuffer::getStreamId() const {
  return mStreamId;
}

int CaptureRequestBuffer::getBufferId() const {
  return mBufferId;
}

sp<Fence> CaptureRequestBuffer::getFence() const {
  return mFence;
}

VirtualCameraRenderThread::VirtualCameraRenderThread(
    VirtualCameraSessionContext& sessionContext,
    const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback)
    : mCameraDeviceCallback(cameraDeviceCallback),
      mInputSurfaceSize(inputSurfaceSize),
      mReportedSensorSize(reportedSensorSize),
      mSessionContext(sessionContext),
      mInputSurfaceFuture(mInputSurfacePromise.get_future()) {
}

VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  if (mThread.joinable()) {
    mThread.join();
  }
}

ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
    const RequestSettings& requestSettings)
    : mFrameNumber(frameNumber),
      mBuffers(requestBuffers),
      mRequestSettings(requestSettings) {
}

int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}

const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}

const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
  return mRequestSettings;
}

void VirtualCameraRenderThread::requestTextureUpdate() {
  std::lock_guard<std::mutex> lock(mLock);
  // If the queue is not empty, we don't need to set the
  // mTextureUpdateRequested flag, since the texture will be updated during
  // ProcessCaptureRequestTask processing anyway.
  if (mQueue.empty()) {
    mTextureUpdateRequested = true;
    mCondVar.notify_one();
  }
}

void VirtualCameraRenderThread::enqueueTask(
    std::unique_ptr<ProcessCaptureRequestTask> task) {
  std::lock_guard<std::mutex> lock(mLock);
  // When enqueuing a process capture request task, clear the
  // mTextureUpdateRequested flag. If this flag is set, the texture was not yet
  // updated and it will be updated when processing the
  // ProcessCaptureRequestTask anyway.
  mTextureUpdateRequested = false;
  mQueue.emplace_back(std::move(task));
  mCondVar.notify_one();
}

void VirtualCameraRenderThread::flush() {
  std::lock_guard<std::mutex> lock(mLock);
  while (!mQueue.empty()) {
    std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
    mQueue.pop_front();
    flushCaptureRequest(*task);
  }
}

void VirtualCameraRenderThread::start() {
  mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
}

void VirtualCameraRenderThread::stop() {
  {
    std::lock_guard<std::mutex> lock(mLock);
    mPendingExit = true;
    mCondVar.notify_one();
  }
}

sp<Surface> VirtualCameraRenderThread::getInputSurface() {
  return mInputSurfaceFuture.get();
}

RenderThreadTask VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support movable std::unique_lock.
  //
  // The lock assertion below is an explicit declaration that the lock
  // is held in this scope, which is true, since it's only released
  // while waiting inside the mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || mTextureUpdateRequested || !mQueue.empty();
  });
  if (mPendingExit) {
    // A RenderThreadTask holding a null task signals the render thread to
    // terminate.
    return RenderThreadTask(nullptr);
  }
  if (mTextureUpdateRequested) {
    // If mTextureUpdateRequested is set, it's guaranteed the queue is empty;
    // return kUpdateTextureTask to signal that we want the render thread to
    // update the texture (consume a buffer from the queue).
    mTextureUpdateRequested = false;
    return RenderThreadTask(kUpdateTextureTask);
  }
  RenderThreadTask task(std::move(mQueue.front()));
  mQueue.pop_front();
  return task;
}

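// Render thread entry point: initializes the EGL context, texture programs
// and input surface texture, processes tasks until asked to exit, and tears
// the EGL objects down again on the same thread.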
void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  mEglTextureYuvProgram =
      std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
  mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
      EglTextureProgram::TextureFormat::RGBA);
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
      mInputSurfaceSize.width, mInputSurfaceSize.height);
  sp<FrameAvailableListenerProxy> frameAvailableListener =
      sp<FrameAvailableListenerProxy>::make(
          [this]() { requestTextureUpdate(); });
  mEglSurfaceTexture->setFrameAvailableListener(frameAvailableListener);

  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  while (RenderThreadTask task = dequeueTask()) {
    std::visit(
        overloaded{[this](const std::unique_ptr<ProcessCaptureRequestTask>& t) {
                     processTask(*t);
                   },
                   [this](const UpdateTextureTask&) {
                     ALOGV("Idle update of the texture");
                     mEglSurfaceTexture->updateTexture();
                   }},
        task);
  }

  // Destroy the EGL utilities while still on the render thread.
  mEglSurfaceTexture.reset();
  mEglTextureRgbProgram.reset();
  mEglTextureYuvProgram.reset();
  mEglDisplayContext.reset();

  ALOGV("Render thread exiting");
}

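// Handles one capture request: throttles to the requested FPS range, waits
// for (or repeats) an input frame, renders into all output buffers, and
// reports shutter/error notifications and the capture result to the camera
// framework callback.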
void VirtualCameraRenderThread::processTask(
    const ProcessCaptureRequestTask& request) {
  std::chrono::nanoseconds timestamp =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());
  const std::chrono::nanoseconds lastAcquisitionTimestamp(
      mLastAcquisitionTimestampNanoseconds.exchange(timestamp.count(),
                                                    std::memory_order_relaxed));

  if (request.getRequestSettings().fpsRange) {
    const int maxFps =
        std::max(1, request.getRequestSettings().fpsRange->maxFps);
    const std::chrono::nanoseconds minFrameDuration(
        static_cast<uint64_t>(1e9 / maxFps));
    const std::chrono::nanoseconds frameDuration =
        timestamp - lastAcquisitionTimestamp;
    if (frameDuration < minFrameDuration) {
      // We're too fast for the configured maxFps, let's wait a bit.
      const std::chrono::nanoseconds sleepTime =
          minFrameDuration - frameDuration;
      ALOGV("Current frame duration would be %" PRIu64
            " ns, sleeping for %" PRIu64
            " ns before updating texture to match maxFps %d",
            static_cast<uint64_t>(frameDuration.count()),
            static_cast<uint64_t>(sleepTime.count()), maxFps);

      std::this_thread::sleep_for(sleepTime);
      timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());
      mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
                                                 std::memory_order_relaxed);
    }
  }

  // Calculate the maximal amount of time we can afford to wait for the next
  // frame.
  const std::chrono::nanoseconds maxFrameDuration =
      getMaxFrameDuration(request.getRequestSettings());
  const std::chrono::nanoseconds elapsedDuration =
      timestamp - lastAcquisitionTimestamp;
  if (elapsedDuration < maxFrameDuration) {
    // We can afford to wait for the next frame.
    // Note that if there's already a new frame in the input Surface, the call
    // below returns immediately.
    bool gotNewFrame = mEglSurfaceTexture->waitForNextFrame(maxFrameDuration -
                                                            elapsedDuration);
    timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
        std::chrono::steady_clock::now().time_since_epoch());
    if (!gotNewFrame) {
      ALOGV(
          "%s: No new frame received on input surface after waiting for "
          "%" PRIu64 "ns, repeating last frame.",
          __func__,
          static_cast<uint64_t>((timestamp - lastAcquisitionTimestamp).count()));
    }
    mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
                                               std::memory_order_relaxed);
  }
  // Acquire the new (most recent) image from the Surface.
  mEglSurfaceTexture->updateTexture();

  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  // Partial result needs to be set to 1 when metadata is present.
  captureResult.partialResult = 1;
  captureResult.inputBuffer.streamId = -1;
  captureResult.physicalCameraMetadata.resize(0);
  captureResult.result = createCaptureResultMetadata(
      timestamp, request.getRequestSettings(), mReportedSensorSize);

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (size_t i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::OK;

    const std::optional<Stream> streamConfig =
        mSessionContext.getStreamConfig(reqBuffer.getStreamId());

    if (!streamConfig.has_value()) {
      resBuffer.status = BufferStatus::ERROR;
      continue;
    }

    auto status = streamConfig->format == PixelFormat::BLOB
                      ? renderIntoBlobStreamBuffer(
                            reqBuffer.getStreamId(), reqBuffer.getBufferId(),
                            captureResult.result, request.getRequestSettings(),
                            reqBuffer.getFence())
                      : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                    reqBuffer.getBufferId(),
                                                    reqBuffer.getFence());
    if (!status.isOk()) {
      resBuffer.status = BufferStatus::ERROR;
    }
  }

  std::vector<NotifyMsg> notifyMsg{
      createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
  for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
    if (resBuffer.status != BufferStatus::OK) {
      notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
                                                     resBuffer.streamId));
    }
  }

  auto status = mCameraDeviceCallback->notify(notifyMsg);
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  ALOGV("%s: Successfully called processCaptureResult", __func__);
}

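// Completes a capture request without rendering: all output buffers are
// returned with ERROR status (propagating their fences as release fences) and
// an ERROR_REQUEST notification precedes the capture result.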
void VirtualCameraRenderThread::flushCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  captureResult.inputBuffer.streamId = -1;

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (size_t i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::ERROR;
    sp<Fence> fence = reqBuffer.getFence();
    if (fence != nullptr && fence->isValid()) {
      resBuffer.releaseFence.fds.emplace_back(fence->dup());
    }
  }

  auto status = mCameraDeviceCallback->notify(
      {createRequestErrorNotifyMsg(request.getFrameNumber())});
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
  }
}

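// Renders the current input frame scaled to the requested thumbnail
// resolution and JPEG-compresses it. Returns the compressed bytes, or an
// empty vector when no thumbnail was requested or compression failed.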
std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
    const Resolution resolution, const int quality) {
  if (resolution.width == 0 || resolution.height == 0) {
    ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
    return {};
  }

  ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
        resolution.width, resolution.height, quality);
  Resolution bufferSize = roundTo2DctSize(resolution);
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
  if (framebuffer == nullptr) {
    ALOGE(
        "Failed to allocate temporary framebuffer for JPEG thumbnail "
        "compression");
    return {};
  }

  // TODO(b/324383963) Add support for letterboxing if the thumbnail size
  // doesn't correspond to the input texture aspect ratio.
  if (!renderIntoEglFramebuffer(*framebuffer, /*fence=*/nullptr,
                                Rect(resolution.width, resolution.height))
           .isOk()) {
    ALOGE(
        "Failed to render input texture into temporary framebuffer for JPEG "
        "thumbnail");
    return {};
  }

  std::vector<uint8_t> compressedThumbnail;
  compressedThumbnail.resize(kJpegThumbnailBufferSize);
  ALOGV("%s: Compressing thumbnail %d x %d", __func__, resolution.width,
        resolution.height);
  std::optional<size_t> compressedSize =
      compressJpeg(resolution.width, resolution.height, quality,
                   framebuffer->getHardwareBuffer(), {},
                   compressedThumbnail.size(), compressedThumbnail.data());
  if (!compressedSize.has_value()) {
    ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
    return {};
  }
  compressedThumbnail.resize(compressedSize.value());
  return compressedThumbnail;
}

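// Renders the current input frame into a BLOB (JPEG) stream buffer: draws
// into a temporary YUV framebuffer, compresses it together with the EXIF APP1
// segment, and appends the CameraBlob descriptor at the end of the output
// buffer.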
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
    const int streamId, const int bufferId, const CameraMetadata& resultMetadata,
    const RequestSettings& requestSettings, sp<Fence> fence) {
  std::shared_ptr<AHardwareBuffer> hwBuffer =
      mSessionContext.fetchHardwareBuffer(streamId, bufferId);
  if (hwBuffer == nullptr) {
    ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
          bufferId, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
  if (!stream.has_value()) {
    ALOGE("%s: Failed to fetch information about stream %d", __func__, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
        stream->width, stream->height, requestSettings.jpegQuality);

  // Create a YUV framebuffer and render the surface into it. This takes care
  // of rescaling as well as any necessary format conversion. The buffer
  // dimensions need to be rounded to the nearest multiple of the JPEG DCT
  // size; however, we pass a viewport corresponding to the size of the
  // stream, so the image is only rendered into the area matching the stream
  // size.
  Resolution bufferSize =
      roundTo2DctSize(Resolution(stream->width, stream->height));
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
  if (framebuffer == nullptr) {
    ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Render into the temporary framebuffer.
  ndk::ScopedAStatus status = renderIntoEglFramebuffer(
      *framebuffer, /*fence=*/nullptr, Rect(stream->width, stream->height));
  if (!status.isOk()) {
    ALOGE("Failed to render input texture into temporary framebuffer");
    return status;
  }

  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
                             fence);
  if (planesLock.getStatus() != OK) {
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::vector<uint8_t> app1ExifData =
      createExif(Resolution(stream->width, stream->height), resultMetadata,
                 createThumbnail(requestSettings.thumbnailResolution,
                                 requestSettings.thumbnailJpegQuality));
  std::optional<size_t> compressedSize = compressJpeg(
      stream->width, stream->height, requestSettings.jpegQuality,
      framebuffer->getHardwareBuffer(), app1ExifData,
      stream->bufferSize - sizeof(CameraBlob), (*planesLock).planes[0].data);

  if (!compressedSize.has_value()) {
    ALOGE("%s: Failed to compress JPEG image", __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  CameraBlob cameraBlob{
      .blobId = CameraBlobId::JPEG,
      .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};

  memcpy(reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
             (stream->bufferSize - sizeof(cameraBlob)),
         &cameraBlob, sizeof(cameraBlob));

  ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
        __func__, compressedSize.value());

  return ndk::ScopedAStatus::ok();
}

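// Renders the current input frame into the EGL framebuffer wrapping the
// output buffer of a non-BLOB image stream.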
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
    int streamId, int bufferId, sp<Fence> fence) {
  ALOGV("%s", __func__);

  const std::chrono::nanoseconds before =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  // Render the current input texture into the stream's buffer using EGL.
  std::shared_ptr<EglFrameBuffer> framebuffer =
      mSessionContext.fetchOrCreateEglFramebuffer(
          mEglDisplayContext->getEglDisplay(), streamId, bufferId);
  if (framebuffer == nullptr) {
    ALOGE(
        "%s: Failed to get EGL framebuffer corresponding to buffer id "
        "%d for streamId %d",
        __func__, bufferId, streamId);
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);

  const std::chrono::nanoseconds after =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
        static_cast<long long>(after.count() - before.count()));

  // Propagate any render failure so the caller can flag the buffer as ERROR.
  return status;
}

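// Common EGL rendering path: waits for the optional acquire fence, then draws
// the input surface texture (via the YUV or RGBA program, depending on the
// buffer format) into the framebuffer, restricted to the optional viewport.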
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
    EglFrameBuffer& framebuffer, sp<Fence> fence, std::optional<Rect> viewport) {
  ALOGV("%s", __func__);
  // Wait for the acquire fence to clear.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE("Timeout while waiting for the acquire fence for buffer");
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  mEglDisplayContext->makeCurrent();
  framebuffer.beforeDraw();

  Rect viewportRect =
      viewport.value_or(Rect(framebuffer.getWidth(), framebuffer.getHeight()));
  glViewport(viewportRect.left, viewportRect.top, viewportRect.getWidth(),
             viewportRect.getHeight());

  sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
  if (textureBuffer == nullptr) {
    // If there's no current buffer, nothing was written to the surface and
    // the texture is not initialized yet. Clear the framebuffer to a solid
    // color instead of rendering the texture.
    glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
  } else {
    const bool renderSuccess =
        isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
            ? mEglTextureYuvProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix())
            : mEglTextureRgbProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix());
    if (!renderSuccess) {
      ALOGE("%s: Failed to render texture", __func__);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }
  framebuffer.afterDraw();

  return ndk::ScopedAStatus::ok();
}

}  // namespace virtualcamera
}  // namespace companion
}  // namespace android