/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Uncomment LOG_NDEBUG to enable verbose logging, and uncomment both LOG_NDEBUG
// *and* LOG_NNDEBUG to enable very verbose logging.

//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0

#define LOG_TAG "EmulatedCamera3_QemuSensor"
#define ATRACE_TAG ATRACE_TAG_CAMERA

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif

#include "qemu-pipeline3/QemuSensor.h"
#include "system/camera_metadata.h"
#include <gralloc_cb_bp.h>

#include <cmath>
#include <cstdlib>
#include <cstring>
#include <linux/videodev2.h>
#include <log/log.h>
#include <cutils/properties.h>
#include <ui/Rect.h>
#include <utils/Trace.h>

namespace android {

const nsecs_t QemuSensor::kExposureTimeRange[2] =
        {1000L, 300000000L};  // 1 us - 0.3 sec
const nsecs_t QemuSensor::kFrameDurationRange[2] =
        {33331760L, 300000000L};  // ~1/30 s - 0.3 sec
const nsecs_t QemuSensor::kMinVerticalBlank = 10000L;

const int32_t QemuSensor::kSensitivityRange[2] = {100, 1600};
const uint32_t QemuSensor::kDefaultSensitivity = 100;

const char QemuSensor::kHostCameraVerString[] = "ro.boot.qemu.camera_protocol_ver";

#define GRALLOC_PROP "ro.hardware.gralloc"

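// Reads the ro.hardware.gralloc property to decide whether the system is
// running with minigbm; anything else falls back to goldfish gralloc mode.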
static bool getIsMinigbmFromProperty() {
    char grallocValue[PROPERTY_VALUE_MAX] = "";
    property_get(GRALLOC_PROP, grallocValue, "");
    bool isValid = grallocValue[0] != '\0';

    if (!isValid) return false;

    bool res = 0 == strcmp("minigbm", grallocValue);

    if (res) {
        ALOGV("%s: Is using minigbm, in minigbm mode.\n", __func__);
    } else {
        ALOGV("%s: Is not using minigbm, in goldfish mode.\n", __func__);
    }

    return res;
}

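// Builds a sensor bound to the named qemu camera device, with the active
// array covering the full width x height pixel array. The host camera
// protocol version and gralloc mode are latched once at construction.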
QemuSensor::QemuSensor(const char *deviceName, uint32_t width, uint32_t height,
                       GraphicBufferMapper* gbm):
        Thread(false),
        mWidth(width),
        mHeight(height),
        mActiveArray{0, 0, width, height},
        mLastRequestWidth(-1),
        mLastRequestHeight(-1),
        mCameraQemuClient(),
        mDeviceName(deviceName),
        mGBA(&GraphicBufferAllocator::get()),
        mGBM(gbm),
        mGotVSync(false),
        mFrameDuration(kFrameDurationRange[0]),
        mNextBuffers(nullptr),
        mFrameNumber(0),
        mCapturedBuffers(nullptr),
        mListener(nullptr),
        mIsMinigbm(getIsMinigbmFromProperty()) {
    mHostCameraVer = property_get_int32(kHostCameraVerString, 0);
    ALOGV("QemuSensor created with pixel array %d x %d", width, height);
}

QemuSensor::~QemuSensor() {
    shutDown();
}

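// Spawns the capture thread and connects to the qemu camera service for this
// device; on success the sensor is left in the ECDS_CONNECTED state.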
status_t QemuSensor::startUp() {
    ALOGV("%s: Entered", __FUNCTION__);

    mCapturedBuffers = nullptr;
    status_t res = run("EmulatedQemuCamera3::QemuSensor",
            ANDROID_PRIORITY_URGENT_DISPLAY);

    if (res != OK) {
        ALOGE("Unable to start up sensor capture thread: %d", res);
    }

    char connect_str[256];
    snprintf(connect_str, sizeof(connect_str), "name=%s", mDeviceName);
    res = mCameraQemuClient.connectClient(connect_str);
    if (res != NO_ERROR) {
        return res;
    }

    res = mCameraQemuClient.queryConnect();
    if (res == NO_ERROR) {
        ALOGV("%s: Connected to device '%s'",
                __FUNCTION__, (const char*) mDeviceName);
        mState = ECDS_CONNECTED;
    } else {
        ALOGE("%s: Connection to device '%s' failed",
                __FUNCTION__, (const char*) mDeviceName);
    }

    return res;
}

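// Joins the capture thread and asks the qemu camera service to stop the
// device; returns the result of the stop query.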
status_t QemuSensor::shutDown() {
    ALOGV("%s: Entered", __FUNCTION__);

    status_t res = requestExitAndWait();
    if (res != OK) {
        ALOGE("Unable to shut down sensor capture thread: %d", res);
    }

    /* Stop the actual camera device. */
    res = mCameraQemuClient.queryStop();
    if (res == NO_ERROR) {
        mState = ECDS_CONNECTED;
        ALOGV("%s: Qemu camera device '%s' is stopped",
                __FUNCTION__, (const char*) mDeviceName);
    } else {
        ALOGE("%s: Unable to stop device '%s'",
                __FUNCTION__, (const char*) mDeviceName);
    }

    return res;
}

void QemuSensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Frame duration set to %f", ns/1000000.f);
    mFrameDuration = ns;
}

void QemuSensor::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffers = buffers;
}

void QemuSensor::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock lock(mControlMutex);
    mFrameNumber = frameNumber;
}

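// Blocks for up to reltime waiting for the capture thread to signal the start
// of a frame; returns false on error or if no VSync arrived in time.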
bool QemuSensor::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);

    mGotVSync = false;
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    return mGotVSync;
}

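// Blocks for up to reltime waiting for a completed capture, then reports its
// timestamp through captureTime and releases the readout slot.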
bool QemuSensor::waitForNewFrame(nsecs_t reltime, nsecs_t *captureTime) {
    Mutex::Autolock lock(mReadoutMutex);
    if (mCapturedBuffers == nullptr) {
        int res;
        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
        if (res == TIMED_OUT) {
            return false;
        } else if (res != OK || mCapturedBuffers == nullptr) {
            ALOGE("Error waiting for sensor readout signal: %d", res);
            return false;
        }
    }
    mReadoutComplete.signal();

    *captureTime = mCaptureTime;
    mCapturedBuffers = nullptr;
    return true;
}

QemuSensor::QemuSensorListener::~QemuSensorListener() {
}

void QemuSensor::setQemuSensorListener(QemuSensorListener *listener) {
    Mutex::Autolock lock(mControlMutex);
    mListener = listener;
}

status_t QemuSensor::readyToRun() {
    ALOGV("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffers = nullptr;
    return OK;
}

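// One pass of the simulated sensor pipeline: latch the latest control
// parameters and signal VSync, hand any previously captured frame to the
// readout side, capture into the next buffer set, and finally sleep out the
// remainder of the requested frame duration.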
bool QemuSensor::threadLoop() {
    ATRACE_CALL();
    /*
     * Stages are out-of-order relative to a single frame's processing, but
     * in-order in time.
     */

    /*
     * Stage 1: Read in latest control parameters.
     */
    uint64_t frameDuration;
    Buffers *nextBuffers;
    uint32_t frameNumber;
    QemuSensorListener *listener = nullptr;
    {
        // Lock while we're grabbing readout variables.
        Mutex::Autolock lock(mControlMutex);
        frameDuration = mFrameDuration;
        nextBuffers = mNextBuffers;
        frameNumber = mFrameNumber;
        listener = mListener;
        // Don't reuse a buffer set.
        mNextBuffers = nullptr;

        // Signal VSync for start of readout.
        ALOGVV("QemuSensor VSync");
        mGotVSync = true;
        mVSync.signal();
    }

    /*
     * Stage 3: Read out latest captured image.
     */

    Buffers *capturedBuffers = nullptr;
    nsecs_t captureTime = 0;

    nsecs_t startRealTime = systemTime();
    /*
     * Stagefright cares about system time for timestamps, so base simulated
     * time on that.
     */
    nsecs_t simulatedTime = startRealTime;
    nsecs_t frameEndRealTime = startRealTime + frameDuration;

    if (mNextCapturedBuffers != nullptr) {
        ALOGVV("QemuSensor starting readout");
        /*
         * Pretend we're doing readout now; will signal once enough time has
         * elapsed.
         */
        capturedBuffers = mNextCapturedBuffers;
        captureTime = mNextCaptureTime;
    }

    /*
     * TODO: Move this signal to another thread to simulate readout time
     * properly.
     */
    if (capturedBuffers != nullptr) {
        ALOGVV("QemuSensor readout complete");
        Mutex::Autolock lock(mReadoutMutex);
        if (mCapturedBuffers != nullptr) {
            ALOGV("Waiting for readout thread to catch up!");
            mReadoutComplete.wait(mReadoutMutex);
        }

        mCapturedBuffers = capturedBuffers;
        mCaptureTime = captureTime;
        mReadoutAvailable.signal();
        capturedBuffers = nullptr;
    }

    /*
     * Stage 2: Capture new image.
     */
    mNextCaptureTime = simulatedTime;
    mNextCapturedBuffers = nextBuffers;

    if (mNextCapturedBuffers != nullptr) {

        int64_t timestamp = 0L;

        // Might be adding more buffers, so size isn't constant.
        for (size_t i = 0; i < mNextCapturedBuffers->size(); ++i) {
            const StreamBuffer &b = (*mNextCapturedBuffers)[i];
            ALOGVV("QemuSensor capturing buffer %zu: stream %d,"
                    " %d x %d, format %x, stride %d, buf %p, img %p",
                    i, b.streamId, b.width, b.height, b.format, b.stride,
                    b.buffer, b.img);
            switch (b.format) {
                case HAL_PIXEL_FORMAT_RGB_888:
                    captureRGB(b.img, b.width, b.height, b.stride, &timestamp);
                    break;
                case HAL_PIXEL_FORMAT_RGBA_8888:
                    if (mHostCameraVer == 1 && !mIsMinigbm) {
                        captureRGBA(b.width, b.height, b.stride, &timestamp, b.buffer);
                    } else {
                        captureRGBA(b.img, b.width, b.height, b.stride, &timestamp);
                    }
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    if (b.dataSpace == HAL_DATASPACE_DEPTH) {
                        ALOGE("%s: Depth clouds unsupported", __FUNCTION__);
                    } else {
                        /*
                         * Add auxiliary buffer of the right size. Assumes only
                         * one BLOB (JPEG) buffer is in mNextCapturedBuffers.
                         */
                        StreamBuffer bAux;
                        bAux.streamId = 0;
                        bAux.width = b.width;
                        bAux.height = b.height;
                        bAux.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
                        bAux.stride = b.width;
                        if (mHostCameraVer == 1 && !mIsMinigbm) {
                            const uint64_t usage =
                                GRALLOC_USAGE_HW_CAMERA_READ |
                                GRALLOC_USAGE_HW_CAMERA_WRITE |
                                GRALLOC_USAGE_HW_TEXTURE;
                            const uint64_t graphicBufferId = 0; // not used
                            const uint32_t layerCount = 1;
                            buffer_handle_t handle;
                            uint32_t stride;

                            status_t status = mGBA->allocate(
                                bAux.width, bAux.height, bAux.format,
                                layerCount, usage,
                                &handle, &stride,
                                graphicBufferId, "QemuSensor");
                            if (status != OK) {
                                LOG_ALWAYS_FATAL("allocate failed");
                            }

                            android_ycbcr ycbcr = {};
                            mGBM->lockYCbCr(handle,
                                            GRALLOC_USAGE_HW_CAMERA_WRITE,
                                            Rect(0, 0, bAux.width, bAux.height),
                                            &ycbcr);

                            bAux.buffer = new buffer_handle_t;
                            *bAux.buffer = handle;
                            bAux.img = (uint8_t*)ycbcr.y;
                        } else {
                            bAux.buffer = nullptr;
                            // TODO: Reuse these.
                            bAux.img = new uint8_t[b.width * b.height * 3];
                        }
                        mNextCapturedBuffers->push_back(bAux);
                    }
                    break;
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    if (mHostCameraVer == 1 && !mIsMinigbm) {
                        captureYU12(b.width, b.height, b.stride, &timestamp, b.buffer);
                    } else {
                        captureYU12(b.img, b.width, b.height, b.stride, &timestamp);
                    }
                    break;
                default:
                    ALOGE("%s: Unknown/unsupported format %x, no output",
                            __FUNCTION__, b.format);
                    break;
            }
        }
        if (timestamp != 0L) {
            mNextCaptureTime = timestamp;
        }
        // Note: we have to do this after the actual capture so that the
        // capture time is accurate as reported from QEMU.
        if (listener != nullptr) {
            listener->onQemuSensorEvent(frameNumber, QemuSensorListener::EXPOSURE_START,
                                        mNextCaptureTime);
        }
    }

    ALOGVV("QemuSensor vertical blanking interval");
    nsecs_t workDoneRealTime = systemTime();
    const nsecs_t timeAccuracy = 2e6;  // 2 ms of imprecision is ok.
    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
        timespec t;
        t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;

        int ret;
        do {
            ret = nanosleep(&t, &t);
        } while (ret != 0);
    }
    ALOGVV("Frame cycle took %d ms, target %d ms",
            (int) ((systemTime() - startRealTime) / 1000000),
            (int) (frameDuration / 1000000));
    return true;
}

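// Legacy-protocol RGBA path: restarts the host camera if the requested
// dimensions changed, then pulls an RGB32 preview frame into img.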
void QemuSensor::captureRGBA(uint8_t *img, uint32_t width, uint32_t height,
        uint32_t stride, int64_t *timestamp) {
    ATRACE_CALL();
    status_t res;
    if (width != (uint32_t)mLastRequestWidth ||
        height != (uint32_t)mLastRequestHeight) {
        ALOGI("%s: Dimensions for the current request (%dx%d) differ "
              "from the previous request (%dx%d). Restarting camera",
                __FUNCTION__, width, height, mLastRequestWidth,
                mLastRequestHeight);

        if (mLastRequestWidth != -1 || mLastRequestHeight != -1) {
            // We only need to stop the camera if this isn't the first request.

            // Stop the camera device.
            res = mCameraQemuClient.queryStop();
            if (res == NO_ERROR) {
                mState = ECDS_CONNECTED;
                ALOGV("%s: Qemu camera device '%s' is stopped",
                        __FUNCTION__, (const char*) mDeviceName);
            } else {
                ALOGE("%s: Unable to stop device '%s'",
                        __FUNCTION__, (const char*) mDeviceName);
            }
        }

        /*
         * Host Camera always assumes V4L2_PIX_FMT_RGB32 as the preview format,
         * and asks for the video format from the pixFmt parameter, which is
         * V4L2_PIX_FMT_YUV420 in our implementation.
         */
        uint32_t pixFmt = V4L2_PIX_FMT_YUV420;
        res = mCameraQemuClient.queryStart(pixFmt, width, height);
        if (res == NO_ERROR) {
            mLastRequestWidth = width;
            mLastRequestHeight = height;
            ALOGV("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
                    __FUNCTION__, (const char*) mDeviceName,
                    reinterpret_cast<const char*>(&pixFmt),
                    mWidth, mHeight);
            mState = ECDS_STARTED;
        } else {
            ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
                    __FUNCTION__, (const char*) mDeviceName,
                    reinterpret_cast<const char*>(&pixFmt),
                    mWidth, mHeight);
            return;
        }
    }
    if (width != stride) {
        ALOGW("%s: expect stride (%d), actual stride (%d)", __FUNCTION__,
              width, stride);
    }

    // Since the format is V4L2_PIX_FMT_RGB32, we need 4 bytes per pixel.
    size_t bufferSize = width * height * 4;
    // Apply no white balance or exposure compensation.
    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
    float exposureCompensation = 1.0f;
    // Read from webcam.
    mCameraQemuClient.queryFrame(nullptr, img, 0, bufferSize, whiteBalance[0],
            whiteBalance[1], whiteBalance[2],
            exposureCompensation, timestamp);

    ALOGVV("RGBA sensor image captured");
}

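// Protocol-version-1 RGBA path (goldfish gralloc only): the host writes the
// frame directly into the gralloc buffer identified by its mmap offset, so no
// extra client-side copy is made.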
void QemuSensor::captureRGBA(uint32_t width, uint32_t height,
        uint32_t stride, int64_t *timestamp, buffer_handle_t* handle) {
    ATRACE_CALL();
    status_t res;
    if (mLastRequestWidth == -1 || mLastRequestHeight == -1) {
        uint32_t pixFmt = V4L2_PIX_FMT_YUV420;
        res = mCameraQemuClient.queryStart();
        if (res == NO_ERROR) {
            mLastRequestWidth = width;
            mLastRequestHeight = height;
            ALOGV("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
                    __FUNCTION__, (const char*) mDeviceName,
                    reinterpret_cast<const char*>(&pixFmt),
                    mWidth, mHeight);
            mState = ECDS_STARTED;
        } else {
            ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
                    __FUNCTION__, (const char*) mDeviceName,
                    reinterpret_cast<const char*>(&pixFmt),
                    mWidth, mHeight);
            return;
        }
    }
    if (width != stride) {
        ALOGW("%s: expect stride (%d), actual stride (%d)", __FUNCTION__,
              width, stride);
    }

    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
    float exposureCompensation = 1.0f;
    const cb_handle_t* cb = cb_handle_t::from(*handle);
    LOG_ALWAYS_FATAL_IF(!cb, "Unexpected buffer handle");
    const uint64_t offset = cb->getMmapedOffset();
    mCameraQemuClient.queryFrame(width, height, V4L2_PIX_FMT_RGB32, offset,
                                 whiteBalance[0], whiteBalance[1], whiteBalance[2],
                                 exposureCompensation, timestamp);

    ALOGVV("RGBA sensor image captured");
}

void QemuSensor::captureRGB(uint8_t *img, uint32_t width, uint32_t height,
        uint32_t stride, int64_t *timestamp) {
    ALOGE("%s: Not implemented", __FUNCTION__);
}

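// Legacy-protocol YUV420 path: restarts the host camera if the requested
// dimensions changed, then pulls a video frame into img.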
void QemuSensor::captureYU12(uint8_t *img, uint32_t width, uint32_t height, uint32_t stride,
                             int64_t *timestamp) {
    ATRACE_CALL();
    status_t res;
    if (width != (uint32_t)mLastRequestWidth ||
        height != (uint32_t)mLastRequestHeight) {
        ALOGI("%s: Dimensions for the current request (%dx%d) differ "
              "from the previous request (%dx%d). Restarting camera",
                __FUNCTION__, width, height, mLastRequestWidth,
                mLastRequestHeight);

        if (mLastRequestWidth != -1 || mLastRequestHeight != -1) {
            // We only need to stop the camera if this isn't the first request.
            // Stop the camera device.
            res = mCameraQemuClient.queryStop();
            if (res == NO_ERROR) {
                mState = ECDS_CONNECTED;
                ALOGV("%s: Qemu camera device '%s' is stopped",
                        __FUNCTION__, (const char*) mDeviceName);
            } else {
                ALOGE("%s: Unable to stop device '%s'",
                        __FUNCTION__, (const char*) mDeviceName);
            }
        }

        /*
         * Host Camera always assumes V4L2_PIX_FMT_RGB32 as the preview format,
         * and asks for the video format from the pixFmt parameter, which is
         * V4L2_PIX_FMT_YUV420 in our implementation.
         */
        uint32_t pixFmt = mIsMinigbm ? V4L2_PIX_FMT_NV12 : V4L2_PIX_FMT_YUV420;
        res = mCameraQemuClient.queryStart(pixFmt, width, height);
        if (res == NO_ERROR) {
            mLastRequestWidth = width;
            mLastRequestHeight = height;
            ALOGV("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
                    __FUNCTION__, (const char*) mDeviceName,
                    reinterpret_cast<const char*>(&pixFmt),
                    mWidth, mHeight);
            mState = ECDS_STARTED;
        } else {
            ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
                    __FUNCTION__, (const char*) mDeviceName,
                    reinterpret_cast<const char*>(&pixFmt),
                    mWidth, mHeight);
            return;
        }
    }
    if (width != stride) {
        ALOGW("%s: expect stride (%d), actual stride (%d)", __FUNCTION__,
              width, stride);
    }

    // Calculate the buffer size for YUV420.
    size_t bufferSize = (width * height * 12) / 8;
    // Apply no white balance or exposure compensation.
    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
    float exposureCompensation = 1.0f;
    // Read video frame from webcam.
    mCameraQemuClient.queryFrame(img, nullptr, bufferSize, 0, whiteBalance[0],
            whiteBalance[1], whiteBalance[2],
            exposureCompensation, timestamp);

    ALOGVV("YUV420 sensor image captured");
}

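// Protocol-version-1 YUV420 path: like the RGBA variant, the host writes the
// frame directly into the gralloc buffer identified by its mmap offset.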
void QemuSensor::captureYU12(uint32_t width, uint32_t height, uint32_t stride,
                             int64_t *timestamp, buffer_handle_t* handle) {
    ATRACE_CALL();
    status_t res;
    if (mLastRequestWidth == -1 || mLastRequestHeight == -1) {
        uint32_t pixFmt = V4L2_PIX_FMT_YUV420;
        res = mCameraQemuClient.queryStart();
        if (res == NO_ERROR) {
            mLastRequestWidth = width;
            mLastRequestHeight = height;
            ALOGV("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
                    __FUNCTION__, (const char*) mDeviceName,
                    reinterpret_cast<const char*>(&pixFmt),
                    mWidth, mHeight);
            mState = ECDS_STARTED;
        } else {
            ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
                    __FUNCTION__, (const char*) mDeviceName,
                    reinterpret_cast<const char*>(&pixFmt),
                    mWidth, mHeight);
            return;
        }
    }
    if (width != stride) {
        ALOGW("%s: expect stride (%d), actual stride (%d)", __FUNCTION__,
              width, stride);
    }

    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
    float exposureCompensation = 1.0f;
    const cb_handle_t* cb = cb_handle_t::from(*handle);
    LOG_ALWAYS_FATAL_IF(!cb, "Unexpected buffer handle");
    const uint64_t offset = cb->getMmapedOffset();
    mCameraQemuClient.queryFrame(width, height, V4L2_PIX_FMT_YUV420, offset,
                                 whiteBalance[0], whiteBalance[1], whiteBalance[2],
                                 exposureCompensation, timestamp);
    ALOGVV("YUV420 sensor image captured");
}

}  // namespace android