/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// #define LOG_NDEBUG 0
#define LOG_TAG "VirtualCameraRenderThread"

#include "VirtualCameraRenderThread.h"

#include <android_companion_virtualdevice_flags.h>

#include <chrono>
#include <cstdint>
#include <cstring>
#include <future>
#include <memory>
#include <mutex>
#include <thread>
#include <utility>
#include <vector>

#include "Exif.h"
#include "GLES/gl.h"
#include "VirtualCameraCaptureResult.h"
#include "VirtualCameraDevice.h"
#include "VirtualCameraSessionContext.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
#include "aidl/android/hardware/camera/device/CameraBlob.h"
#include "aidl/android/hardware/camera/device/CameraBlobId.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/CaptureResult.h"
#include "aidl/android/hardware/camera/device/ErrorCode.h"
#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
#include "aidl/android/hardware/camera/device/NotifyMsg.h"
#include "aidl/android/hardware/camera/device/ShutterMsg.h"
#include "aidl/android/hardware/camera/device/StreamBuffer.h"
#include "android-base/thread_annotations.h"
#include "android/binder_auto_utils.h"
#include "android/hardware_buffer.h"
#include "system/camera_metadata.h"
#include "ui/GraphicBuffer.h"
#include "ui/Rect.h"
#include "util/EglFramebuffer.h"
#include "util/JpegUtil.h"
#include "util/Util.h"
#include "utils/Errors.h"

namespace android {
namespace companion {
namespace virtualcamera {

using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BufferStatus;
using ::aidl::android::hardware::camera::device::CameraBlob;
using ::aidl::android::hardware::camera::device::CameraBlobId;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::CaptureResult;
using ::aidl::android::hardware::camera::device::ErrorCode;
using ::aidl::android::hardware::camera::device::ErrorMsg;
using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
using ::aidl::android::hardware::camera::device::NotifyMsg;
using ::aidl::android::hardware::camera::device::ShutterMsg;
using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::camera::device::StreamBuffer;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::android::base::ScopedLockAssertion;

using ::android::hardware::camera::common::helper::ExifUtils;

namespace {

// helper type for the visitor
template <class... Ts>
struct overloaded : Ts... {
  using Ts::operator()...;
};
// explicit deduction guide (not needed as of C++20)
template <class... Ts>
overloaded(Ts...) -> overloaded<Ts...>;

using namespace std::chrono_literals;

namespace flags = ::android::companion::virtualdevice::flags;

static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;

static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB

static constexpr UpdateTextureTask kUpdateTextureTask;
// The maximum amount of time to wait for the first frame to be drawn on the
// input surface.
static constexpr std::chrono::nanoseconds kMaxWaitFirstFrame = 3s;

static constexpr double kOneSecondInNanos = 1e9;

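// Create a NotifyMsg carrying a ShutterMsg for the given frame number and
// capture timestamp.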
NotifyMsg createShutterNotifyMsg(int frameNumber,
                                 std::chrono::nanoseconds timestamp) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
      .frameNumber = frameNumber,
      .timestamp = timestamp.count(),
  });
  return msg;
}

// Create a NotifyMsg for an error case. The default error is ERROR_BUFFER.
NotifyMsg createErrorNotifyMsg(int frameNumber, int streamId,
                               ErrorCode errorCode = ErrorCode::ERROR_BUFFER) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
                                          .errorStreamId = streamId,
                                          .errorCode = errorCode});
  return msg;
}

NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::error>(
      ErrorMsg{.frameNumber = frameNumber,
               // errorStreamId needs to be set to -1 for ERROR_REQUEST
               // (not tied to specific stream).
               .errorStreamId = -1,
               .errorCode = ErrorCode::ERROR_REQUEST});
  return msg;
}

std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
    EGLDisplay eglDisplay, const uint width, const int height) {
  const AHardwareBuffer_Desc desc{.width = static_cast<uint32_t>(width),
                                  .height = static_cast<uint32_t>(height),
                                  .layers = 1,
                                  .format = kHardwareBufferFormat,
                                  .usage = kHardwareBufferUsage,
                                  .rfu0 = 0,
                                  .rfu1 = 0};

  AHardwareBuffer* hwBufferPtr;
  int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
  if (status != NO_ERROR) {
    ALOGE(
        "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
        __func__, status);
    return nullptr;
  }

  return std::make_shared<EglFrameBuffer>(
      eglDisplay,
      std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
}

bool isYuvFormat(const PixelFormat pixelFormat) {
  switch (static_cast<android_pixel_format_t>(pixelFormat)) {
    case HAL_PIXEL_FORMAT_YCBCR_422_I:
    case HAL_PIXEL_FORMAT_YCBCR_422_SP:
    case HAL_PIXEL_FORMAT_Y16:
    case HAL_PIXEL_FORMAT_YV12:
    case HAL_PIXEL_FORMAT_YCBCR_420_888:
      return true;
    default:
      return false;
  }
}

std::vector<uint8_t> createExif(
    Resolution imageSize, const CameraMetadata resultMetadata,
    const std::vector<uint8_t>& compressedThumbnail = {}) {
  std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
  exifUtils->initialize();

  // Make a copy of the metadata in order to convert it to the HAL metadata
  // format (as opposed to the AIDL class) and use the setFromMetadata method
  // from ExifUtils.
  camera_metadata_t* rawSettings =
      clone_camera_metadata((camera_metadata_t*)resultMetadata.metadata.data());
  if (rawSettings != nullptr) {
    android::hardware::camera::common::helper::CameraMetadata halMetadata(
        rawSettings);
    exifUtils->setFromMetadata(halMetadata, imageSize.width, imageSize.height);
  }
  exifUtils->setMake(VirtualCameraDevice::kDefaultMakeAndModel);
  exifUtils->setModel(VirtualCameraDevice::kDefaultMakeAndModel);
  exifUtils->setFlash(0);

  std::vector<uint8_t> app1Data;

  size_t thumbnailDataSize = compressedThumbnail.size();
  const void* thumbnailData =
      thumbnailDataSize > 0
          ? reinterpret_cast<const void*>(compressedThumbnail.data())
          : nullptr;

  if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
    ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
    return app1Data;
  }

  const uint8_t* data = exifUtils->getApp1Buffer();
  const size_t size = exifUtils->getApp1Length();

  app1Data.insert(app1Data.end(), data, data + size);
  return app1Data;
}

std::chrono::nanoseconds getMaxFrameDuration(
    const RequestSettings& requestSettings) {
  if (requestSettings.fpsRange.has_value()) {
    return std::chrono::nanoseconds(static_cast<uint64_t>(
        kOneSecondInNanos / std::max(1, requestSettings.fpsRange->minFps)));
  }
  return std::chrono::nanoseconds(
      static_cast<uint64_t>(kOneSecondInNanos / VirtualCameraDevice::kMinFps));
}

// Translate a frame duration into an fps value.
double nanosToFps(std::chrono::nanoseconds frameDuration) {
  const double oneSecondInNanos = 1e9;
  const double fpsNanos = oneSecondInNanos / frameDuration.count();
  return fpsNanos;
}

}  // namespace


CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
                                           sp<Fence> fence)
    : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
}

int CaptureRequestBuffer::getStreamId() const {
  return mStreamId;
}

int CaptureRequestBuffer::getBufferId() const {
  return mBufferId;
}

sp<Fence> CaptureRequestBuffer::getFence() const {
  return mFence;
}

VirtualCameraRenderThread::VirtualCameraRenderThread(
    VirtualCameraSessionContext& sessionContext,
    const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback)
    : mCameraDeviceCallback(cameraDeviceCallback),
      mInputSurfaceSize(inputSurfaceSize),
      mReportedSensorSize(reportedSensorSize),
      mSessionContext(sessionContext),
      mInputSurfaceFuture(mInputSurfacePromise.get_future()) {
}

VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  if (mThread.joinable()) {
    mThread.join();
  }
}

ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
    const RequestSettings& requestSettings)
    : mFrameNumber(frameNumber),
      mBuffers(requestBuffers),
      mRequestSettings(requestSettings) {
}

int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}

const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}

const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
  return mRequestSettings;
}

void VirtualCameraRenderThread::requestTextureUpdate() {
  std::lock_guard<std::mutex> lock(mLock);
  ALOGV("%s", __func__);
  // If the queue is not empty, we don't need to set the
  // mTextureUpdateRequested flag, since the texture will be updated during
  // ProcessCaptureRequestTask processing anyway.
  if (mQueue.empty()) {
    mTextureUpdateRequested = true;
    mCondVar.notify_one();
  }
}

void VirtualCameraRenderThread::enqueueTask(
    std::unique_ptr<ProcessCaptureRequestTask> task) {
  std::lock_guard<std::mutex> lock(mLock);
  // When enqueuing a process capture request task, clear the
  // mTextureUpdateRequested flag. If this flag is set, the texture was not yet
  // updated and it will be updated when processing ProcessCaptureRequestTask
  // anyway.
  mTextureUpdateRequested = false;
  mQueue.emplace_back(std::move(task));
  mCondVar.notify_one();
}

void VirtualCameraRenderThread::flush() {
  std::lock_guard<std::mutex> lock(mLock);
  while (!mQueue.empty()) {
    std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
    mQueue.pop_front();
    flushCaptureRequest(*task);
  }
}

void VirtualCameraRenderThread::start() {
  mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
}

void VirtualCameraRenderThread::stop() {
  {
    std::lock_guard<std::mutex> lock(mLock);
    mPendingExit = true;
    mCondVar.notify_one();
  }
}

sp<Surface> VirtualCameraRenderThread::getInputSurface() {
  return mInputSurfaceFuture.get();
}

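// Block until there is a pending capture task, a texture update request or a
// pending exit, and return the corresponding RenderThreadTask.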
RenderThreadTask VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support movable std::unique_lock.
  //
  // The lock assertion below is basically an explicit declaration that
  // the lock is held in this scope, which is true, since it's only
  // released while waiting inside the mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || mTextureUpdateRequested || !mQueue.empty();
  });
  if (mPendingExit) {
    // A RenderThreadTask holding a null task signals the render thread to
    // terminate.
    return RenderThreadTask(nullptr);
  }
  if (mTextureUpdateRequested) {
    // If mTextureUpdateRequested is set, the queue is guaranteed to be empty;
    // return kUpdateTextureTask to signal that the render thread should update
    // the texture (consume a buffer from the queue).
    mTextureUpdateRequested = false;
    return RenderThreadTask(kUpdateTextureTask);
  }
  RenderThreadTask task(std::move(mQueue.front()));
  mQueue.pop_front();
  return task;
}

void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  mEglTextureYuvProgram =
      std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
  mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
      EglTextureProgram::TextureFormat::RGBA);
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
      mInputSurfaceSize.width, mInputSurfaceSize.height);
  mEglSurfaceTexture->setFrameAvailableListener(
      [this]() { requestTextureUpdate(); });

  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  while (RenderThreadTask task = dequeueTask()) {
    std::visit(
        overloaded{[this](const std::unique_ptr<ProcessCaptureRequestTask>& t) {
                     processTask(*t);
                   },
                   [this](const UpdateTextureTask&) {
                     ALOGV("Idle update of the texture");
                     mEglSurfaceTexture->updateTexture();
                   }},
        task);
  }

  // Destroy EGL utilities still on the render thread.
  mEglSurfaceTexture.reset();
  mEglTextureRgbProgram.reset();
  mEglTextureYuvProgram.reset();
  mEglDisplayContext.reset();

  ALOGV("Render thread exiting");
}

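// Process a single capture request: wait for (or reuse) an input frame,
// render it into the request's output buffers and report the result back to
// the camera framework.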
void VirtualCameraRenderThread::processTask(
    const ProcessCaptureRequestTask& request) {
  ALOGV("%s request frame number %d", __func__, request.getFrameNumber());
  std::chrono::nanoseconds deviceTime =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());
  const std::chrono::nanoseconds lastAcquisitionTimestamp(
      mLastAcquisitionTimestampNanoseconds.exchange(deviceTime.count(),
                                                    std::memory_order_relaxed));

  if (request.getRequestSettings().fpsRange) {
    ALOGV("%s request fps {%d,%d}", __func__,
          request.getRequestSettings().fpsRange->minFps,
          request.getRequestSettings().fpsRange->maxFps);
    int maxFps = std::max(1, request.getRequestSettings().fpsRange->maxFps);
    deviceTime = throttleRendering(maxFps, lastAcquisitionTimestamp, deviceTime);
  }

  // Calculate the maximum amount of time we can afford to wait for the next
  // frame.
  const bool isFirstFrameDrawn = mEglSurfaceTexture->isFirstFrameDrawn();
  ALOGV("First Frame Drawn: %s", isFirstFrameDrawn ? "Yes" : "No");

  const std::chrono::nanoseconds maxFrameDuration =
      isFirstFrameDrawn ? getMaxFrameDuration(request.getRequestSettings())
                        : kMaxWaitFirstFrame;
  const std::chrono::nanoseconds elapsedDuration =
      isFirstFrameDrawn ? deviceTime - lastAcquisitionTimestamp : 0ns;

  if (elapsedDuration < maxFrameDuration) {
    // We can afford to wait for the next frame.
    // Note that if there's already a new frame in the input Surface, the call
    // below returns immediately.
    bool gotNewFrame = mEglSurfaceTexture->waitForNextFrame(maxFrameDuration -
                                                            elapsedDuration);
    deviceTime = std::chrono::duration_cast<std::chrono::nanoseconds>(
        std::chrono::steady_clock::now().time_since_epoch());
    if (!gotNewFrame) {
      if (!mEglSurfaceTexture->isFirstFrameDrawn()) {
        // No input frame was ever drawn. This is considered an error case.
        // Notify the framework of the failure and return early.
        ALOGW("Timed out waiting for first frame to be drawn.");
        std::unique_ptr<CaptureResult> captureResult = createCaptureResult(
            request.getFrameNumber(), /* metadata = */ nullptr);
        notifyTimeout(request, *captureResult);
        submitCaptureResult(std::move(captureResult));
        return;
      }

      ALOGV(
          "%s: No new frame received on input surface after waiting for "
          "%" PRIu64 "ns, repeating last frame.",
          __func__,
          static_cast<uint64_t>(
              (deviceTime - lastAcquisitionTimestamp).count()));
    }
    mLastAcquisitionTimestampNanoseconds.store(deviceTime.count(),
                                               std::memory_order_relaxed);
  }
  // Acquire new (most recent) image from the Surface.
  mEglSurfaceTexture->updateTexture();
  std::chrono::nanoseconds captureTimestamp = deviceTime;

  if (flags::camera_timestamp_from_surface()) {
    std::chrono::nanoseconds surfaceTimestamp =
        getSurfaceTimestamp(elapsedDuration);
    if (surfaceTimestamp.count() > 0) {
      captureTimestamp = surfaceTimestamp;
    }
    ALOGV(
        "%s surfaceTimestamp:%lld deviceTime:%lld captureTimestamp:%lld "
        "(nanos)",
        __func__, surfaceTimestamp.count(), deviceTime.count(),
        captureTimestamp.count());
  }

  std::unique_ptr<CaptureResult> captureResult = createCaptureResult(
      request.getFrameNumber(),
      createCaptureResultMetadata(
          captureTimestamp, request.getRequestSettings(), mReportedSensorSize));
  renderOutputBuffers(request, *captureResult);

  auto status = notifyShutter(request, *captureResult, captureTimestamp);
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  submitCaptureResult(std::move(captureResult));
}

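// If the previous frame was acquired less than 1/maxFps ago, sleep so that
// the configured maximum frame rate is not exceeded, and return the
// (possibly refreshed) timestamp.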
std::chrono::nanoseconds VirtualCameraRenderThread::throttleRendering(
    int maxFps, std::chrono::nanoseconds lastAcquisitionTimestamp,
    std::chrono::nanoseconds timestamp) {
  const std::chrono::nanoseconds minFrameDuration(
      static_cast<uint64_t>(1e9 / maxFps));
  const std::chrono::nanoseconds frameDuration =
      timestamp - lastAcquisitionTimestamp;
  if (frameDuration < minFrameDuration) {
    // We're too fast for the configured maxFps, let's wait a bit.
    const std::chrono::nanoseconds sleepTime = minFrameDuration - frameDuration;
    ALOGV("Current frame duration would be %" PRIu64
          " ns corresponding to %.3f Fps, "
          "sleeping for %" PRIu64
          " ns before updating texture to match maxFps %d",
          static_cast<uint64_t>(frameDuration.count()),
          nanosToFps(frameDuration), static_cast<uint64_t>(sleepTime.count()),
          maxFps);

    std::this_thread::sleep_for(sleepTime);
    timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
        std::chrono::steady_clock::now().time_since_epoch());
    mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
                                               std::memory_order_relaxed);
  }
  return timestamp;
}

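// Return the capture timestamp reported by the input surface. When the last
// frame is being repeated, the previous timestamp is artificially advanced by
// timeSinceLastFrame so timestamps stay monotonically increasing.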
std::chrono::nanoseconds VirtualCameraRenderThread::getSurfaceTimestamp(
    std::chrono::nanoseconds timeSinceLastFrame) {
  std::chrono::nanoseconds surfaceTimestamp = mEglSurfaceTexture->getTimestamp();
  uint64_t lastSurfaceTimestamp = mLastSurfaceTimestampNanoseconds.load();
  if (lastSurfaceTimestamp > 0 &&
      surfaceTimestamp.count() <= lastSurfaceTimestamp) {
    // The timestamps were provided by the producer but we are
    // repeating the last frame, so we increase the previous timestamp by
    // the elapsed time since its capture, otherwise the camera framework
    // will discard the frame.
    surfaceTimestamp = std::chrono::nanoseconds(lastSurfaceTimestamp +
                                                timeSinceLastFrame.count());
    ALOGI(
        "Surface's timestamp is stale. Artificially increasing the surface "
        "timestamp by %lld",
        timeSinceLastFrame.count());
  }
  mLastSurfaceTimestampNanoseconds.store(surfaceTimestamp.count(),
                                         std::memory_order_relaxed);
  return surfaceTimestamp;
}

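// Construct a CaptureResult skeleton for the given frame number, populated
// with the provided result metadata (or empty metadata when nullptr).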
std::unique_ptr<CaptureResult> VirtualCameraRenderThread::createCaptureResult(
    int frameNumber, std::unique_ptr<CameraMetadata> metadata) {
  std::unique_ptr<CaptureResult> captureResult =
      std::make_unique<CaptureResult>();
  captureResult->fmqResultSize = 0;
  captureResult->frameNumber = frameNumber;
  // Partial result needs to be set to 1 when metadata are present.
  captureResult->partialResult = 1;
  captureResult->inputBuffer.streamId = -1;
  captureResult->physicalCameraMetadata.resize(0);
  captureResult->result = metadata != nullptr ? *metadata : CameraMetadata();
  return captureResult;
}

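// Render the current input texture into every output buffer of the request
// and record per-buffer success or failure in the capture result.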
void VirtualCameraRenderThread::renderOutputBuffers(
    const ProcessCaptureRequestTask& request, CaptureResult& captureResult) {
  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::OK;

    const std::optional<Stream> streamConfig =
        mSessionContext.getStreamConfig(reqBuffer.getStreamId());

    if (!streamConfig.has_value()) {
      resBuffer.status = BufferStatus::ERROR;
      continue;
    }

    auto status = streamConfig->format == PixelFormat::BLOB
                      ? renderIntoBlobStreamBuffer(
                            reqBuffer.getStreamId(), reqBuffer.getBufferId(),
                            captureResult.result, request.getRequestSettings(),
                            reqBuffer.getFence())
                      : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                    reqBuffer.getBufferId(),
                                                    reqBuffer.getFence());
    if (!status.isOk()) {
      resBuffer.status = BufferStatus::ERROR;
    }
  }
}

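// Mark all output buffers of the request as failed and notify the camera
// framework with ERROR_REQUEST messages.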
::ndk::ScopedAStatus VirtualCameraRenderThread::notifyTimeout(
    const ProcessCaptureRequestTask& request, CaptureResult& captureResult) {
  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  std::vector<NotifyMsg> notifyMsgs;

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::ERROR;
    notifyMsgs.push_back(createErrorNotifyMsg(
        request.getFrameNumber(), resBuffer.streamId, ErrorCode::ERROR_REQUEST));
  }
  return mCameraDeviceCallback->notify(notifyMsgs);
}

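// Send a shutter notification for the request, together with ERROR_BUFFER
// messages for any output buffers that failed to render.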
::ndk::ScopedAStatus VirtualCameraRenderThread::notifyShutter(
    const ProcessCaptureRequestTask& request, const CaptureResult& captureResult,
    std::chrono::nanoseconds captureTimestamp) {
  std::vector<NotifyMsg> notifyMsgs{
      createShutterNotifyMsg(request.getFrameNumber(), captureTimestamp)};
  for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
    if (resBuffer.status != BufferStatus::OK) {
      notifyMsgs.push_back(
          createErrorNotifyMsg(request.getFrameNumber(), resBuffer.streamId));
    }
  }

  return mCameraDeviceCallback->notify(notifyMsgs);
}

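// Pass the finished capture result to the camera framework via
// processCaptureResult.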
::ndk::ScopedAStatus VirtualCameraRenderThread::submitCaptureResult(
    std::unique_ptr<CaptureResult> captureResult) {
  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults;
  captureResults.push_back(std::move(*captureResult));

  ::ndk::ScopedAStatus status =
      mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
    return status;
  }

  ALOGV("%s: Successfully called processCaptureResult", __func__);
  return status;
}

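// Fail the capture request without rendering: notify the framework with
// ERROR_REQUEST and return all buffers with BufferStatus::ERROR.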
void VirtualCameraRenderThread::flushCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  captureResult.inputBuffer.streamId = -1;

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::ERROR;
    sp<Fence> fence = reqBuffer.getFence();
    if (fence != nullptr && fence->isValid()) {
      resBuffer.releaseFence.fds.emplace_back(fence->dup());
    }
  }

  auto status = mCameraDeviceCallback->notify(
      {createRequestErrorNotifyMsg(request.getFrameNumber())});
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
  }
}

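// Render the current input texture into a temporary framebuffer and compress
// it into a JPEG thumbnail of the requested resolution and quality. Returns
// an empty vector on failure or when a zero-sized thumbnail is requested.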
std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
    const Resolution resolution, const int quality) {
  if (resolution.width == 0 || resolution.height == 0) {
    ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
    return {};
  }

  ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
        resolution.width, resolution.height, quality);
  Resolution bufferSize = roundTo2DctSize(resolution);
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
  if (framebuffer == nullptr) {
    ALOGE(
        "Failed to allocate temporary framebuffer for JPEG thumbnail "
        "compression");
    return {};
  }

  // TODO(b/324383963) Add support for letterboxing if the thumbnail size
  // doesn't correspond to the input texture aspect ratio.
  if (!renderIntoEglFramebuffer(*framebuffer, /*fence=*/nullptr,
                                Rect(resolution.width, resolution.height))
           .isOk()) {
    ALOGE(
        "Failed to render input texture into temporary framebuffer for JPEG "
        "thumbnail");
    return {};
  }

  std::vector<uint8_t> compressedThumbnail;
  compressedThumbnail.resize(kJpegThumbnailBufferSize);
  ALOGV("%s: Compressing thumbnail %d x %d", __func__, resolution.width,
        resolution.height);
  std::optional<size_t> compressedSize =
      compressJpeg(resolution.width, resolution.height, quality,
                   framebuffer->getHardwareBuffer(), {},
                   compressedThumbnail.size(), compressedThumbnail.data());
  if (!compressedSize.has_value()) {
    ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
    return {};
  }
  compressedThumbnail.resize(compressedSize.value());
  return compressedThumbnail;
}

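// Render the current input texture into a BLOB (JPEG) stream buffer: the
// frame is drawn into a temporary framebuffer, compressed to JPEG with EXIF
// data (including an optional thumbnail) and written into the gralloc buffer
// together with the trailing CameraBlob transport header.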
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
    const int streamId, const int bufferId, const CameraMetadata& resultMetadata,
    const RequestSettings& requestSettings, sp<Fence> fence) {
  std::shared_ptr<AHardwareBuffer> hwBuffer =
      mSessionContext.fetchHardwareBuffer(streamId, bufferId);
  if (hwBuffer == nullptr) {
    ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
          bufferId, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
  if (!stream.has_value()) {
    ALOGE("%s, failed to fetch information about stream %d", __func__, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
        stream->width, stream->height, requestSettings.jpegQuality);

  // Create a YUV framebuffer and render the surface into it. This takes care
  // of rescaling as well as potential format conversion. The buffer dimensions
  // need to be rounded to the nearest multiple of the JPEG DCT size, however
  // we pass the viewport corresponding to the size of the stream so the image
  // will only be rendered into the area corresponding to the stream size.
  Resolution bufferSize =
      roundTo2DctSize(Resolution(stream->width, stream->height));
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
  if (framebuffer == nullptr) {
    ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Render into temporary framebuffer.
  ndk::ScopedAStatus status = renderIntoEglFramebuffer(
      *framebuffer, /*fence=*/nullptr, Rect(stream->width, stream->height));
  if (!status.isOk()) {
    ALOGE("Failed to render input texture into temporary framebuffer");
    return status;
  }

  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
                             fence);
  if (planesLock.getStatus() != OK) {
    ALOGE("Failed to lock hwBuffer planes");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::vector<uint8_t> app1ExifData =
      createExif(Resolution(stream->width, stream->height), resultMetadata,
                 createThumbnail(requestSettings.thumbnailResolution,
                                 requestSettings.thumbnailJpegQuality));

  unsigned long outBufferSize = stream->bufferSize - sizeof(CameraBlob);
  void* outBuffer = (*planesLock).planes[0].data;
  std::optional<size_t> compressedSize = compressJpeg(
      stream->width, stream->height, requestSettings.jpegQuality,
      framebuffer->getHardwareBuffer(), app1ExifData, outBufferSize, outBuffer);

  if (!compressedSize.has_value()) {
    ALOGE("%s: Failed to compress JPEG image", __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Add the transport header at the end of the JPEG output buffer.
  //
  // jpegBlobId must start at byte[buffer_size - sizeof(CameraBlob)],
  // where the buffer_size is the size of gralloc buffer.
  //
  // See
  // hardware/interfaces/camera/device/aidl/android/hardware/camera/device/CameraBlobId.aidl
  // for the full explanation of the following code.
  CameraBlob cameraBlob{
      .blobId = CameraBlobId::JPEG,
      .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};

  // Copy the cameraBlob to the end of the JPEG buffer.
  uint8_t* jpegStreamEndAddress =
      reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
      (stream->bufferSize - sizeof(cameraBlob));
  memcpy(jpegStreamEndAddress, &cameraBlob, sizeof(cameraBlob));

  ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
        __func__, compressedSize.value());

  return ndk::ScopedAStatus::ok();
}

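// Render the current input texture directly into the EGL framebuffer backed
// by the given image stream buffer.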
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
    int streamId, int bufferId, sp<Fence> fence) {
  ALOGV("%s", __func__);

  const std::chrono::nanoseconds before =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  // Render the input texture into the stream buffer using EGL.
  std::shared_ptr<EglFrameBuffer> framebuffer =
      mSessionContext.fetchOrCreateEglFramebuffer(
          mEglDisplayContext->getEglDisplay(), streamId, bufferId);
  if (framebuffer == nullptr) {
    ALOGE(
        "%s: Failed to get EGL framebuffer corresponding to buffer id "
        "%d for streamId %d",
        __func__, bufferId, streamId);
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);

  const std::chrono::nanoseconds after =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
        after.count() - before.count());

  return ndk::ScopedAStatus::ok();
}

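// Wait for the buffer's acquire fence, then draw the current input texture
// (or a solid clear color when no frame is available yet) into the given EGL
// framebuffer, optionally restricted to the supplied viewport.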
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
    EglFrameBuffer& framebuffer, sp<Fence> fence, std::optional<Rect> viewport) {
  ALOGV("%s", __func__);
  // Wait for fence to clear.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE("Timeout while waiting for the acquire fence for buffer");
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  mEglDisplayContext->makeCurrent();
  framebuffer.beforeDraw();

  Rect viewportRect =
      viewport.value_or(Rect(framebuffer.getWidth(), framebuffer.getHeight()));
  glViewport(viewportRect.left, viewportRect.top, viewportRect.getWidth(),
             viewportRect.getHeight());

  sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
  if (textureBuffer == nullptr) {
    // If there's no current buffer, nothing was written to the surface and
    // the texture is not initialized yet. Clear the framebuffer with a solid
    // color instead of rendering the texture.
    glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
  } else {
    const bool renderSuccess =
        isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
            ? mEglTextureYuvProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix())
            : mEglTextureRgbProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix());
    if (!renderSuccess) {
      ALOGE("%s: Failed to render texture", __func__);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }
  framebuffer.afterDraw();

  return ndk::ScopedAStatus::ok();
}

}  // namespace virtualcamera
}  // namespace companion
}  // namespace android