/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Uncomment LOG_NDEBUG to enable verbose logging, and uncomment both LOG_NDEBUG
// *and* LOG_NNDEBUG to enable very verbose logging.

//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0

#define LOG_TAG "EmulatedCamera3_QemuSensor"
#define ATRACE_TAG ATRACE_TAG_CAMERA

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif
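
// ALOGVV compiles to a no-op unless LOG_NNDEBUG is defined above, so it can
// carry per-frame logging that would be too chatty even for ALOGV.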

#include "qemu-pipeline3/QemuSensor.h"
#include "system/camera_metadata.h"
#include <gralloc_cb_bp.h>

#include <cmath>
#include <cstdlib>
#include <linux/videodev2.h>
#include <log/log.h>
#include <cutils/properties.h>
#include <ui/Rect.h>
#include <utils/Trace.h>

namespace android {

const nsecs_t QemuSensor::kExposureTimeRange[2] =
    {1000L, 300000000L}; // 1 us - 0.3 sec
const nsecs_t QemuSensor::kFrameDurationRange[2] =
    {33331760L, 300000000L}; // ~1/30 s - 0.3 sec
const nsecs_t QemuSensor::kMinVerticalBlank = 10000L;

const int32_t QemuSensor::kSensitivityRange[2] = {100, 1600};
const uint32_t QemuSensor::kDefaultSensitivity = 100;

const char QemuSensor::kHostCameraVerString[] = "ro.kernel.qemu.camera_protocol_ver";
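
// The host camera protocol version (read once in the constructor) selects
// between two capture paths below: version 1 passes a gralloc buffer's mmap
// offset to the host so the frame lands in the buffer directly, while
// version 0 copies each frame through a client-side image pointer.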

QemuSensor::QemuSensor(const char *deviceName, uint32_t width, uint32_t height,
        GraphicBufferMapper* gbm):
        Thread(false),
        mWidth(width),
        mHeight(height),
        mActiveArray{0, 0, width, height},
        mLastRequestWidth(-1),
        mLastRequestHeight(-1),
        mCameraQemuClient(),
        mDeviceName(deviceName),
        mGBA(&GraphicBufferAllocator::get()),
        mGBM(gbm),
        mGotVSync(false),
        mFrameDuration(kFrameDurationRange[0]),
        mNextBuffers(nullptr),
        mFrameNumber(0),
        mCapturedBuffers(nullptr),
        mListener(nullptr) {
    mHostCameraVer = property_get_int32(kHostCameraVerString, 0);
    ALOGV("QemuSensor created with pixel array %d x %d", width, height);
}

QemuSensor::~QemuSensor() {
    shutDown();
}

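// Starts the capture thread and connects to the host camera service; the
// connection string identifies the host-side device by name ("name=<device>").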
status_t QemuSensor::startUp() {
    ALOGV("%s: Entered", __FUNCTION__);

    mCapturedBuffers = nullptr;
    status_t res = run("EmulatedQemuCamera3::QemuSensor",
                       ANDROID_PRIORITY_URGENT_DISPLAY);

    if (res != OK) {
        ALOGE("Unable to start up sensor capture thread: %d", res);
    }

    char connect_str[256];
    snprintf(connect_str, sizeof(connect_str), "name=%s", mDeviceName);
    res = mCameraQemuClient.connectClient(connect_str);
    if (res != NO_ERROR) {
        return res;
    }

    res = mCameraQemuClient.queryConnect();
    if (res == NO_ERROR) {
        ALOGV("%s: Connected to device '%s'",
              __FUNCTION__, (const char*) mDeviceName);
        mState = ECDS_CONNECTED;
    } else {
        ALOGE("%s: Connection to device '%s' failed",
              __FUNCTION__, (const char*) mDeviceName);
    }

    return res;
}

status_t QemuSensor::shutDown() {
    ALOGV("%s: Entered", __FUNCTION__);

    status_t res = requestExitAndWait();
    if (res != OK) {
        ALOGE("Unable to shut down sensor capture thread: %d", res);
    }

    /* Stop the actual camera device. */
    res = mCameraQemuClient.queryStop();
    if (res == NO_ERROR) {
        mState = ECDS_CONNECTED;
        ALOGV("%s: Qemu camera device '%s' is stopped",
              __FUNCTION__, (const char*) mDeviceName);
    } else {
        ALOGE("%s: Unable to stop device '%s'",
              __FUNCTION__, (const char*) mDeviceName);
    }

    return res;
}

void QemuSensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Frame duration set to %f", ns/1000000.f);
    mFrameDuration = ns;
}

void QemuSensor::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffers = buffers;
}

void QemuSensor::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock lock(mControlMutex);
    mFrameNumber = frameNumber;
}

bool QemuSensor::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);

    mGotVSync = false;
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    return mGotVSync;
}
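
/*
 * Example (a sketch, not part of this class): a frame-processing thread would
 * typically pace itself against the sensor like this, with kWaitPerLoop being
 * a caller-chosen timeout:
 *
 *   if (mSensor->waitForVSync(kWaitPerLoop)) {
 *       mSensor->setDestinationBuffers(&nextBuffers);
 *   }
 */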

bool QemuSensor::waitForNewFrame(nsecs_t reltime, nsecs_t *captureTime) {
    Mutex::Autolock lock(mReadoutMutex);
    if (mCapturedBuffers == nullptr) {
        int res;
        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
        if (res == TIMED_OUT) {
            return false;
        } else if (res != OK || mCapturedBuffers == nullptr) {
            ALOGE("Error waiting for sensor readout signal: %d", res);
            return false;
        }
    }
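
    // Handshake with threadLoop(): the capture thread announced a frame via
    // mReadoutAvailable; signalling mReadoutComplete tells it that ownership
    // has been taken, so it may overwrite mCapturedBuffers with a new frame.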
    mReadoutComplete.signal();

    *captureTime = mCaptureTime;
    mCapturedBuffers = nullptr;
    return true;
}

QemuSensor::QemuSensorListener::~QemuSensorListener() {
}

void QemuSensor::setQemuSensorListener(QemuSensorListener *listener) {
    Mutex::Autolock lock(mControlMutex);
    mListener = listener;
}

status_t QemuSensor::readyToRun() {
    ALOGV("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffers = nullptr;
    return OK;
}

bool QemuSensor::threadLoop() {
    ATRACE_CALL();
    /*
     * Stages are out-of-order relative to a single frame's processing, but
     * in-order in time.
     */
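    /*
     * In code order below: Stage 1 latches the control parameters for this
     * frame, Stage 3 hands the previously captured frame to the readout
     * waiter, and Stage 2 starts capturing the next frame.
     */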

    /*
     * Stage 1: Read in latest control parameters.
     */
    uint64_t frameDuration;
    Buffers *nextBuffers;
    uint32_t frameNumber;
    QemuSensorListener *listener = nullptr;
    {
        // Lock while we're grabbing readout variables.
        Mutex::Autolock lock(mControlMutex);
        frameDuration = mFrameDuration;
        nextBuffers = mNextBuffers;
        frameNumber = mFrameNumber;
        listener = mListener;
        // Don't reuse a buffer set.
        mNextBuffers = nullptr;

        // Signal VSync for start of readout.
        ALOGVV("QemuSensor VSync");
        mGotVSync = true;
        mVSync.signal();
    }

    /*
     * Stage 3: Read out latest captured image.
     */

    Buffers *capturedBuffers = nullptr;
    nsecs_t captureTime = 0;

    nsecs_t startRealTime = systemTime();
    /*
     * Stagefright cares about system time for timestamps, so base simulated
     * time on that.
     */
    nsecs_t simulatedTime = startRealTime;
    nsecs_t frameEndRealTime = startRealTime + frameDuration;

    if (mNextCapturedBuffers != nullptr) {
        ALOGVV("QemuSensor starting readout");
        /*
         * Pretend we're doing readout now; will signal once enough time has
         * elapsed.
         */
        capturedBuffers = mNextCapturedBuffers;
        captureTime = mNextCaptureTime;
    }

    /*
     * TODO: Move this signal to another thread to simulate readout time
     * properly.
     */
    if (capturedBuffers != nullptr) {
        ALOGVV("QemuSensor readout complete");
        Mutex::Autolock lock(mReadoutMutex);
        if (mCapturedBuffers != nullptr) {
            ALOGV("Waiting for readout thread to catch up!");
            mReadoutComplete.wait(mReadoutMutex);
        }

        mCapturedBuffers = capturedBuffers;
        mCaptureTime = captureTime;
        mReadoutAvailable.signal();
        capturedBuffers = nullptr;
    }

    /*
     * Stage 2: Capture new image.
     */
    mNextCaptureTime = simulatedTime;
    mNextCapturedBuffers = nextBuffers;

    if (mNextCapturedBuffers != nullptr) {

        int64_t timestamp = 0L;

        // Might be adding more buffers, so size isn't constant.
        for (size_t i = 0; i < mNextCapturedBuffers->size(); ++i) {
            const StreamBuffer &b = (*mNextCapturedBuffers)[i];
            ALOGVV("QemuSensor capturing buffer %d: stream %d,"
                    " %d x %d, format %x, stride %d, buf %p, img %p",
                    i, b.streamId, b.width, b.height, b.format, b.stride,
                    b.buffer, b.img);
            switch (b.format) {
                case HAL_PIXEL_FORMAT_RGB_888:
                    captureRGB(b.img, b.width, b.height, b.stride, &timestamp);
                    break;
                case HAL_PIXEL_FORMAT_RGBA_8888:
                    if (mHostCameraVer == 1) {
                        captureRGBA(b.width, b.height, b.stride, &timestamp, b.buffer);
                    } else {
                        captureRGBA(b.img, b.width, b.height, b.stride, &timestamp);
                    }
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    if (b.dataSpace == HAL_DATASPACE_DEPTH) {
                        ALOGE("%s: Depth clouds unsupported", __FUNCTION__);
                    } else {
                        /*
                         * Add an auxiliary buffer of the right size. Assumes
                         * only one BLOB (JPEG) buffer is in
                         * mNextCapturedBuffers.
                         */
                        StreamBuffer bAux;
                        bAux.streamId = 0;
                        bAux.width = b.width;
                        bAux.height = b.height;
                        bAux.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
                        bAux.stride = b.width;
                        if (mHostCameraVer == 1) {
                            const uint64_t usage =
                                GRALLOC_USAGE_HW_CAMERA_READ |
                                GRALLOC_USAGE_HW_CAMERA_WRITE |
                                GRALLOC_USAGE_HW_TEXTURE;
                            const uint64_t graphicBufferId = 0; // not used
                            const uint32_t layerCount = 1;
                            buffer_handle_t handle;
                            uint32_t stride;

                            status_t status = mGBA->allocate(
                                bAux.width, bAux.height, bAux.format,
                                layerCount, usage,
                                &handle, &stride,
                                graphicBufferId, "QemuSensor");
                            if (status != OK) {
                                LOG_ALWAYS_FATAL("allocate failed");
                            }

                            android_ycbcr ycbcr = {};
                            mGBM->lockYCbCr(handle,
                                            GRALLOC_USAGE_HW_CAMERA_WRITE,
                                            Rect(0, 0, bAux.width, bAux.height),
                                            &ycbcr);

                            bAux.buffer = new buffer_handle_t;
                            *bAux.buffer = handle;
                            bAux.img = (uint8_t*)ycbcr.y;
                        } else {
                            bAux.buffer = nullptr;
                            // TODO: Reuse these.
                            bAux.img = new uint8_t[b.width * b.height * 3];
                        }
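                        // Because the for loop above re-reads
                        // mNextCapturedBuffers->size() every iteration, this
                        // aux YU12 buffer is itself captured by the
                        // YCbCr_420_888 case on a later pass.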
                        mNextCapturedBuffers->push_back(bAux);
                    }
                    break;
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    if (mHostCameraVer == 1) {
                        captureYU12(b.width, b.height, b.stride, &timestamp, b.buffer);
                    } else {
                        captureYU12(b.img, b.width, b.height, b.stride, &timestamp);
                    }
                    break;
                default:
                    ALOGE("%s: Unknown/unsupported format %x, no output",
                          __FUNCTION__, b.format);
                    break;
            }
        }
        if (timestamp != 0UL) {
            mNextCaptureTime = timestamp;
        }
        // Note: we have to do this after the actual capture so that the
        // capture time is accurate as reported from QEMU.
        if (listener != nullptr) {
            listener->onQemuSensorEvent(frameNumber, QemuSensorListener::EXPOSURE_START,
                                        mNextCaptureTime);
        }
    }

    ALOGVV("QemuSensor vertical blanking interval");
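    // Sleep out the remainder of the frame period so the cycle time tracks
    // the requested frame duration, tolerating ~2 ms of scheduler slop.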
    nsecs_t workDoneRealTime = systemTime();
    const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok.
    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
        timespec t;
        t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;

        int ret;
        do {
            ret = nanosleep(&t, &t);
        } while (ret != 0);
    }
    ALOGVV("Frame cycle took %d ms, target %d ms",
            (int) ((systemTime() - startRealTime) / 1000000),
            (int) (frameDuration / 1000000));
    return true;
}

void QemuSensor::captureRGBA(uint8_t *img, uint32_t width, uint32_t height,
        uint32_t stride, int64_t *timestamp) {
    ATRACE_CALL();
    status_t res;
    if (width != (uint32_t)mLastRequestWidth ||
        height != (uint32_t)mLastRequestHeight) {
        ALOGI("%s: Dimensions for the current request (%dx%d) differ "
              "from the previous request (%dx%d). Restarting camera",
              __FUNCTION__, width, height, mLastRequestWidth,
              mLastRequestHeight);

        if (mLastRequestWidth != -1 || mLastRequestHeight != -1) {
            // We only need to stop the camera if this isn't the first request.

            // Stop the camera device.
            res = mCameraQemuClient.queryStop();
            if (res == NO_ERROR) {
                mState = ECDS_CONNECTED;
                ALOGV("%s: Qemu camera device '%s' is stopped",
                      __FUNCTION__, (const char*) mDeviceName);
            } else {
                ALOGE("%s: Unable to stop device '%s'",
                      __FUNCTION__, (const char*) mDeviceName);
            }
        }

        /*
         * Host Camera always assumes V4L2_PIX_FMT_RGB32 as the preview format,
         * and asks for the video format from the pixFmt parameter, which is
         * V4L2_PIX_FMT_YUV420 in our implementation.
         */
        uint32_t pixFmt = V4L2_PIX_FMT_YUV420;
        res = mCameraQemuClient.queryStart(pixFmt, width, height);
        if (res == NO_ERROR) {
            mLastRequestWidth = width;
            mLastRequestHeight = height;
            ALOGV("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
                  __FUNCTION__, (const char*) mDeviceName,
                  reinterpret_cast<const char*>(&pixFmt),
                  mWidth, mHeight);
            mState = ECDS_STARTED;
        } else {
            ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
                  __FUNCTION__, (const char*) mDeviceName,
                  reinterpret_cast<const char*>(&pixFmt),
                  mWidth, mHeight);
            return;
        }
    }
    if (width != stride) {
        ALOGW("%s: expected stride (%d), actual stride (%d)", __FUNCTION__,
              width, stride);
    }

    // Since the format is V4L2_PIX_FMT_RGB32, we need 4 bytes per pixel.
    size_t bufferSize = width * height * 4;
    // Apply no white balance or exposure compensation.
    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
    float exposureCompensation = 1.0f;
    // Read from webcam.
    mCameraQemuClient.queryFrame(nullptr, img, 0, bufferSize, whiteBalance[0],
                                 whiteBalance[1], whiteBalance[2],
                                 exposureCompensation, timestamp);

    ALOGVV("RGBA sensor image captured");
}

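// Protocol-version-1 variant: rather than copying pixels into a client buffer,
// this passes the gralloc buffer's mmap offset to the host, which writes the
// frame into guest memory directly.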
void QemuSensor::captureRGBA(uint32_t width, uint32_t height,
        uint32_t stride, int64_t *timestamp, buffer_handle_t* handle) {
    ATRACE_CALL();
    status_t res;
    if (mLastRequestWidth == -1 || mLastRequestHeight == -1) {
        uint32_t pixFmt = V4L2_PIX_FMT_YUV420;
        res = mCameraQemuClient.queryStart();
        if (res == NO_ERROR) {
            mLastRequestWidth = width;
            mLastRequestHeight = height;
            ALOGV("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
                  __FUNCTION__, (const char*) mDeviceName,
                  reinterpret_cast<const char*>(&pixFmt),
                  mWidth, mHeight);
            mState = ECDS_STARTED;
        } else {
            ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
                  __FUNCTION__, (const char*) mDeviceName,
                  reinterpret_cast<const char*>(&pixFmt),
                  mWidth, mHeight);
            return;
        }
    }
    if (width != stride) {
        ALOGW("%s: expected stride (%d), actual stride (%d)", __FUNCTION__,
              width, stride);
    }

    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
    float exposureCompensation = 1.0f;
    const cb_handle_t* cb = cb_handle_t::from(*handle);
    LOG_ALWAYS_FATAL_IF(!cb, "Unexpected buffer handle");
    const uint64_t offset = cb->getMmapedOffset();
    mCameraQemuClient.queryFrame(width, height, V4L2_PIX_FMT_RGB32, offset,
                                 whiteBalance[0], whiteBalance[1], whiteBalance[2],
                                 exposureCompensation, timestamp);

    ALOGVV("RGBA sensor image captured");
}

void QemuSensor::captureRGB(uint8_t *img, uint32_t width, uint32_t height,
        uint32_t stride, int64_t *timestamp) {
    ALOGE("%s: Not implemented", __FUNCTION__);
}

void QemuSensor::captureYU12(uint8_t *img, uint32_t width, uint32_t height,
        uint32_t stride, int64_t *timestamp) {
    ATRACE_CALL();
    status_t res;
    if (width != (uint32_t)mLastRequestWidth ||
        height != (uint32_t)mLastRequestHeight) {
        ALOGI("%s: Dimensions for the current request (%dx%d) differ "
              "from the previous request (%dx%d). Restarting camera",
              __FUNCTION__, width, height, mLastRequestWidth,
              mLastRequestHeight);

        if (mLastRequestWidth != -1 || mLastRequestHeight != -1) {
            // We only need to stop the camera if this isn't the first request.
            // Stop the camera device.
            res = mCameraQemuClient.queryStop();
            if (res == NO_ERROR) {
                mState = ECDS_CONNECTED;
                ALOGV("%s: Qemu camera device '%s' is stopped",
                      __FUNCTION__, (const char*) mDeviceName);
            } else {
                ALOGE("%s: Unable to stop device '%s'",
                      __FUNCTION__, (const char*) mDeviceName);
            }
        }

        /*
         * Host Camera always assumes V4L2_PIX_FMT_RGB32 as the preview format,
         * and asks for the video format from the pixFmt parameter, which is
         * V4L2_PIX_FMT_YUV420 in our implementation.
         */
        uint32_t pixFmt = V4L2_PIX_FMT_YUV420;
        res = mCameraQemuClient.queryStart(pixFmt, width, height);
        if (res == NO_ERROR) {
            mLastRequestWidth = width;
            mLastRequestHeight = height;
            ALOGV("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
                  __FUNCTION__, (const char*) mDeviceName,
                  reinterpret_cast<const char*>(&pixFmt),
                  mWidth, mHeight);
            mState = ECDS_STARTED;
        } else {
            ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
                  __FUNCTION__, (const char*) mDeviceName,
                  reinterpret_cast<const char*>(&pixFmt),
                  mWidth, mHeight);
            return;
        }
    }
    if (width != stride) {
        ALOGW("%s: expected stride (%d), actual stride (%d)", __FUNCTION__,
              width, stride);
    }

    // Calculate the buffer size for YUV420.
    size_t bufferSize = (width * height * 12) / 8;
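    // (Y plane: width * height bytes, plus two chroma planes at quarter
    // resolution, width/2 * height/2 bytes each, i.e. 12 bits per pixel.)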
    // Apply no white balance or exposure compensation.
    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
    float exposureCompensation = 1.0f;
    // Read video frame from webcam.
    mCameraQemuClient.queryFrame(img, nullptr, bufferSize, 0, whiteBalance[0],
                                 whiteBalance[1], whiteBalance[2],
                                 exposureCompensation, timestamp);

    ALOGVV("YUV420 sensor image captured");
}

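// Protocol-version-1 variant of captureYU12: the same offset-based path as the
// RGBA variant above, but requesting V4L2_PIX_FMT_YUV420 from the host.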
void QemuSensor::captureYU12(uint32_t width, uint32_t height, uint32_t stride,
        int64_t *timestamp, buffer_handle_t* handle) {
    ATRACE_CALL();
    status_t res;
    if (mLastRequestWidth == -1 || mLastRequestHeight == -1) {
        uint32_t pixFmt = V4L2_PIX_FMT_YUV420;
        res = mCameraQemuClient.queryStart();
        if (res == NO_ERROR) {
            mLastRequestWidth = width;
            mLastRequestHeight = height;
            ALOGV("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
                  __FUNCTION__, (const char*) mDeviceName,
                  reinterpret_cast<const char*>(&pixFmt),
                  mWidth, mHeight);
            mState = ECDS_STARTED;
        } else {
            ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
                  __FUNCTION__, (const char*) mDeviceName,
                  reinterpret_cast<const char*>(&pixFmt),
                  mWidth, mHeight);
            return;
        }
    }
    if (width != stride) {
        ALOGW("%s: expected stride (%d), actual stride (%d)", __FUNCTION__,
              width, stride);
    }

    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
    float exposureCompensation = 1.0f;
    const cb_handle_t* cb = cb_handle_t::from(*handle);
    LOG_ALWAYS_FATAL_IF(!cb, "Unexpected buffer handle");
    const uint64_t offset = cb->getMmapedOffset();
    mCameraQemuClient.queryFrame(width, height, V4L2_PIX_FMT_YUV420, offset,
                                 whiteBalance[0], whiteBalance[1], whiteBalance[2],
                                 exposureCompensation, timestamp);
    ALOGVV("YUV420 sensor image captured");
}

} // end of namespace android