/*
 * Copyright (C) 2021 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Uncomment LOG_NDEBUG to enable verbose logging, and uncomment both LOG_NDEBUG
// *and* LOG_NNDEBUG to enable very verbose logging.

//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0

#define LOG_TAG "EmulatedCamera3_CameraRotator"
#define ATRACE_TAG ATRACE_TAG_CAMERA

#ifdef DEBUG_ROTATING_CAMERA
// No trailing semicolon in the expansion: call sites supply their own, and a
// semicolon here would break single-statement if/else bodies.
#define DDD(fmt, ...) ALOGD("function: %s line: %d: " fmt, __func__, __LINE__, ##__VA_ARGS__)
#else
#define DDD(fmt, ...) ((void)0)
#endif

#include "CameraRotator.h"
#include "system/camera_metadata.h"
#include <gralloc_cb_bp.h>

#include <cerrno>
#include <cmath>
#include <cstdlib>
#include <cstring>
#include <linux/videodev2.h>
#include <log/log.h>
#include <cutils/properties.h>
#include <ui/GraphicBufferMapper.h>
#include <ui/Rect.h>
#include <utils/Trace.h>

namespace android {

const nsecs_t CameraRotator::kExposureTimeRange[2] =
        {1000L, 300000000L};  // 1 us - 0.3 sec
const nsecs_t CameraRotator::kFrameDurationRange[2] =
        {33331760L, 300000000L};  // ~1/30 s - 0.3 sec
const nsecs_t CameraRotator::kMinVerticalBlank = 10000L;

const int32_t CameraRotator::kSensitivityRange[2] = {100, 1600};
const uint32_t CameraRotator::kDefaultSensitivity = 100;

const char CameraRotator::kHostCameraVerString[] = "ro.boot.qemu.camera_protocol_ver";

#define GRALLOC_PROP "ro.hardware.gralloc"

static bool getIsMinigbmFromProperty() {
    char grallocValue[PROPERTY_VALUE_MAX] = "";
    property_get(GRALLOC_PROP, grallocValue, "");

    if (grallocValue[0] == '\0') return false;

    bool res = 0 == strcmp("minigbm", grallocValue);

    if (res) {
        DDD("%s: using minigbm gralloc (minigbm mode)\n", __func__);
    } else {
        DDD("%s: not using minigbm gralloc (goldfish mode)\n", __func__);
    }

    return res;
}

CameraRotator::CameraRotator(int width, int height):
        Thread(false),
        mWidth(width),
        mHeight(height),
        mActiveArray{0, 0, width, height},
        mLastRequestWidth(-1),
        mLastRequestHeight(-1),
        mDeviceName("rotatingcamera"),
        mGBA(&GraphicBufferAllocator::get()),
        // The mapper must be valid: lockYCbCr() is called on it in the
        // host-composed (mHostCameraVer == 1) BLOB path.
        mGBM(&GraphicBufferMapper::get()),
        mGotVSync(false),
        mFrameDuration(kFrameDurationRange[0]),
        mNextBuffers(nullptr),
        mFrameNumber(0),
        mCapturedBuffers(nullptr),
        mListener(nullptr),
        mIsMinigbm(getIsMinigbmFromProperty()) {
    mHostCameraVer = 0;  // property_get_int32(kHostCameraVerString, 0);
    DDD("CameraRotator created with pixel array %d x %d", width, height);
}

CameraRotator::~CameraRotator() {
    shutDown();
}

status_t CameraRotator::startUp() {
    DDD("%s: Entered", __FUNCTION__);

    mCapturedBuffers = nullptr;
    status_t res = run("EmulatedQemuCamera3::CameraRotator",
            ANDROID_PRIORITY_URGENT_DISPLAY);

    if (res != OK) {
        ALOGE("Unable to start up sensor capture thread: %d", res);
    }

    mRender.connectDevice();

    mState = ECDS_CONNECTED;

    return res;
}

status_t CameraRotator::shutDown() {
    DDD("%s: Entered", __FUNCTION__);

    status_t res = requestExitAndWait();
    if (res != OK) {
        ALOGE("Unable to shut down sensor capture thread: %d", res);
    } else {
        mState = ECDS_CONNECTED;
    }

    mRender.stopDevice();

    mRender.disconnectDevice();

    return res;
}

void CameraRotator::setExposureTime(uint64_t ns) {
    (void)ns;
}

void CameraRotator::setSensitivity(uint32_t gain) {
    (void)gain;
}

void CameraRotator::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    DDD("Frame duration set to %f ms", ns / 1000000.f);
    mFrameDuration = ns;
}

void CameraRotator::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffers = buffers;
}

void CameraRotator::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock lock(mControlMutex);
    mFrameNumber = frameNumber;
}

bool CameraRotator::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);

    mGotVSync = false;
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    return mGotVSync;
}

bool CameraRotator::waitForNewFrame(nsecs_t reltime, nsecs_t *captureTime) {
    Mutex::Autolock lock(mReadoutMutex);
    if (mCapturedBuffers == nullptr) {
        int res;
        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
        if (res == TIMED_OUT) {
            return false;
        } else if (res != OK || mCapturedBuffers == nullptr) {
            ALOGE("Error waiting for sensor readout signal: %d", res);
            return false;
        }
    }
    mReadoutComplete.signal();

    *captureTime = mCaptureTime;
    mCapturedBuffers = nullptr;
    return true;
}

CameraRotator::CameraRotatorListener::~CameraRotatorListener() {
}

void CameraRotator::setCameraRotatorListener(CameraRotatorListener *listener) {
    Mutex::Autolock lock(mControlMutex);
    mListener = listener;
}

status_t CameraRotator::readyToRun() {
    DDD("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffers = nullptr;
    return OK;
}

bool CameraRotator::threadLoop() {
    ATRACE_CALL();
    /*
     * Stages are out-of-order relative to a single frame's processing, but
     * in-order in time.
     */
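    /*
     * In pipeline terms: Stage 1 latches the control parameters and signals
     * VSync, Stage 3 hands the *previous* frame's buffers to the readout
     * waiter, and Stage 2 kicks off the capture for the *current* frame.
     * The stage numbers below therefore appear out of order because
     * consecutive frames overlap in time.
     */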

    /*
     * Stage 1: Read in latest control parameters.
     */
    uint64_t frameDuration;
    Buffers *nextBuffers;
    uint32_t frameNumber;
    CameraRotatorListener *listener = nullptr;
    {
        // Lock while we're grabbing readout variables.
        Mutex::Autolock lock(mControlMutex);
        frameDuration = mFrameDuration;
        nextBuffers = mNextBuffers;
        frameNumber = mFrameNumber;
        listener = mListener;
        // Don't reuse a buffer set.
        mNextBuffers = nullptr;

        // Signal VSync for start of readout.
        DDD("CameraRotator VSync");
        mGotVSync = true;
        mVSync.signal();
    }

    /*
     * Stage 3: Read out latest captured image.
     */

    Buffers *capturedBuffers = nullptr;
    nsecs_t captureTime = 0;

    nsecs_t startRealTime = systemTime();
    /*
     * Stagefright cares about system time for timestamps, so base simulated
     * time on that.
     */
    nsecs_t simulatedTime = startRealTime;
    nsecs_t frameEndRealTime = startRealTime + frameDuration;
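    // With the default frame duration kFrameDurationRange[0] (33331760 ns,
    // ~1/30 s), frameEndRealTime lands one ~33.3 ms frame period after this
    // iteration began.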

    if (mNextCapturedBuffers != nullptr) {
        DDD("CameraRotator starting readout");
        /*
         * Pretend we're doing readout now; will signal once enough time has
         * elapsed.
         */
        capturedBuffers = mNextCapturedBuffers;
        captureTime = mNextCaptureTime;
    }

    /*
     * TODO: Move this signal to another thread to simulate readout time
     * properly.
     */
    if (capturedBuffers != nullptr) {
        DDD("CameraRotator readout complete");
        Mutex::Autolock lock(mReadoutMutex);
        if (mCapturedBuffers != nullptr) {
            DDD("Waiting for readout thread to catch up!");
            mReadoutComplete.wait(mReadoutMutex);
        }

        mCapturedBuffers = capturedBuffers;
        mCaptureTime = captureTime;
        mReadoutAvailable.signal();
        capturedBuffers = nullptr;
    }

    /*
     * Stage 2: Capture new image.
     */
    mNextCaptureTime = simulatedTime;
    mNextCapturedBuffers = nextBuffers;

    if (mNextCapturedBuffers != nullptr) {

        int64_t timestamp = 0L;

        // Might be adding more buffers, so size isn't constant.
        for (size_t i = 0; i < mNextCapturedBuffers->size(); ++i) {
            const StreamBuffer &b = (*mNextCapturedBuffers)[i];
            DDD("CameraRotator capturing buffer %zu: stream %d,"
                    " %d x %d, format 0x%x, stride %d, buf %p, img %p",
                    i, b.streamId, b.width, b.height, b.format, b.stride,
                    b.buffer, b.img);
            switch (b.format) {
                case HAL_PIXEL_FORMAT_RGB_888:
                    DDD("format is HAL_PIXEL_FORMAT_RGB_888: 0x%x",
                            HAL_PIXEL_FORMAT_RGB_888);
                    captureRGB(b.img, b.width, b.height, b.stride, &timestamp);
                    break;
                case HAL_PIXEL_FORMAT_RGBA_8888:
                    DDD("format is HAL_PIXEL_FORMAT_RGBA_8888: 0x%x",
                            HAL_PIXEL_FORMAT_RGBA_8888);
                    if (mHostCameraVer == 1 && !mIsMinigbm) {
                        captureRGBA(b.width, b.height, b.stride, &timestamp, b.buffer);
                    } else {
                        captureRGBA(b.img, b.width, b.height, b.stride, &timestamp);
                    }
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    DDD("format is HAL_PIXEL_FORMAT_BLOB: 0x%x",
                            HAL_PIXEL_FORMAT_BLOB);
                    if (b.dataSpace == HAL_DATASPACE_DEPTH) {
                        ALOGE("%s: Depth clouds unsupported", __FUNCTION__);
                    } else {
                        /*
                         * Add an auxiliary buffer of the right size. Assumes
                         * only one BLOB (JPEG) buffer is in
                         * mNextCapturedBuffers.
                         */
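                        // (This YCbCr buffer is presumably consumed later by
                        // the JPEG encoder to fill the BLOB output.)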
                        StreamBuffer bAux;
                        bAux.streamId = 0;
                        bAux.width = b.width;
                        bAux.height = b.height;
                        bAux.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
                        bAux.stride = b.width;
                        if (mHostCameraVer == 1 && !mIsMinigbm) {
                            const uint64_t usage =
                                GRALLOC_USAGE_HW_CAMERA_READ |
                                GRALLOC_USAGE_HW_CAMERA_WRITE |
                                GRALLOC_USAGE_HW_TEXTURE;
                            const uint64_t graphicBufferId = 0; // not used
                            const uint32_t layerCount = 1;
                            buffer_handle_t handle;
                            uint32_t stride;

                            DDD("allocating auxiliary YCbCr_420_888 buffer"
                                    " (format 0x%x)",
                                    HAL_PIXEL_FORMAT_YCbCr_420_888);
                            status_t status = mGBA->allocate(
                                bAux.width, bAux.height, bAux.format,
                                layerCount, usage,
                                &handle, &stride,
                                graphicBufferId, "CameraRotator");
                            if (status != OK) {
                                LOG_ALWAYS_FATAL("allocate failed");
                            }

                            android_ycbcr ycbcr = {};
                            mGBM->lockYCbCr(handle,
                                            GRALLOC_USAGE_HW_CAMERA_WRITE,
                                            Rect(0, 0, bAux.width, bAux.height),
                                            &ycbcr);

                            bAux.buffer = new buffer_handle_t;
                            *bAux.buffer = handle;
                            bAux.img = (uint8_t*)ycbcr.y;
                        } else {
                            bAux.buffer = nullptr;
                            // TODO: Reuse these.
                            bAux.img = new uint8_t[b.width * b.height * 3];
                        }
                        mNextCapturedBuffers->push_back(bAux);
                    }
                    break;
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    DDD("format is HAL_PIXEL_FORMAT_YCbCr_420_888: 0x%x",
                            HAL_PIXEL_FORMAT_YCbCr_420_888);
                    if (mHostCameraVer == 1 && !mIsMinigbm) {
                        captureYU12(b.width, b.height, b.stride, &timestamp, b.buffer);
                    } else {
                        captureYU12(b.img, b.width, b.height, b.stride, &timestamp);
                    }
                    break;
                default:
                    ALOGE("%s: Unknown/unsupported format %x, no output",
                            __FUNCTION__, b.format);
                    break;
            }
        }
        if (timestamp != 0) {
            mNextCaptureTime = timestamp;
        }
        // Note: we have to do this after the actual capture so that the
        // capture time is accurate as reported from QEMU.
        if (listener != nullptr) {
            listener->onCameraRotatorEvent(frameNumber,
                    CameraRotatorListener::EXPOSURE_START, mNextCaptureTime);
        }
    }

    DDD("CameraRotator vertical blanking interval");
    nsecs_t workDoneRealTime = systemTime();
    const nsecs_t timeAccuracy = 2000000L;  // 2 ms of imprecision is ok.
    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
        timespec t;
        t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;

        int ret;
        do {
            // nanosleep() updates t with the remaining time when interrupted,
            // so retry only on EINTR to avoid spinning on a real error.
            ret = nanosleep(&t, &t);
        } while (ret != 0 && errno == EINTR);
    }
    DDD("Frame cycle took %d ms, target %d ms",
            (int) ((systemTime() - startRealTime) / 1000000),
            (int) (frameDuration / 1000000));
    return true;
}

void CameraRotator::captureRGBA(uint8_t *img, uint32_t width, uint32_t height,
        uint32_t stride, int64_t *timestamp) {
    ATRACE_CALL();
    status_t res;
    if (width != (uint32_t)mLastRequestWidth ||
        height != (uint32_t)mLastRequestHeight) {
        ALOGI("%s: Dimensions for the current request (%dx%d) differ "
              "from the previous request (%dx%d). Restarting camera",
                __FUNCTION__, width, height, mLastRequestWidth,
                mLastRequestHeight);

        if (mLastRequestWidth != -1 || mLastRequestHeight != -1) {
            // We only need to stop the camera if this isn't the first request.
            // Stop the camera device.
            res = queryStop();
            if (res == NO_ERROR) {
                mState = ECDS_CONNECTED;
                DDD("%s: Qemu camera device '%s' is stopped",
                        __FUNCTION__, (const char*) mDeviceName);
            } else {
                ALOGE("%s: Unable to stop device '%s'",
                        __FUNCTION__, (const char*) mDeviceName);
            }
        }

        /*
         * The host camera always assumes V4L2_PIX_FMT_RGB32 as the preview
         * format, and takes the video format from the pixFmt parameter,
         * which is V4L2_PIX_FMT_YUV420 in our implementation.
         */
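        // Note: the "%.4s" in the logs below prints pixFmt as its FourCC
        // code; V4L2_PIX_FMT_YUV420, for instance, renders as "YU12".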
        uint32_t pixFmt = V4L2_PIX_FMT_YUV420;
        res = queryStart(pixFmt, width, height);
        if (res == NO_ERROR) {
            mLastRequestWidth = width;
            mLastRequestHeight = height;
            DDD("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
                    __FUNCTION__, (const char*) mDeviceName,
                    reinterpret_cast<const char*>(&pixFmt),
                    mWidth, mHeight);
            mState = ECDS_STARTED;
        } else {
            ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
                    __FUNCTION__, (const char*) mDeviceName,
                    reinterpret_cast<const char*>(&pixFmt),
                    mWidth, mHeight);
            return;
        }
    }
    if (width != stride) {
        ALOGW("%s: expected stride (%d) differs from actual stride (%d)",
              __FUNCTION__, width, stride);
    }

    // Since the format is V4L2_PIX_FMT_RGB32, we need 4 bytes per pixel.
    size_t bufferSize = width * height * 4;
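    // e.g. a 640x480 frame needs 640 * 480 * 4 = 1228800 bytes.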
    // Apply no white balance or exposure compensation.
    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
    float exposureCompensation = 1.0f;
    // Read from webcam.
    queryFrame(nullptr, img, 0, bufferSize, whiteBalance[0],
            whiteBalance[1], whiteBalance[2],
            exposureCompensation, timestamp);

    DDD("RGBA sensor image captured");
}

void CameraRotator::captureRGBA(uint32_t width, uint32_t height,
        uint32_t stride, int64_t *timestamp, buffer_handle_t* handle) {
    ATRACE_CALL();
    status_t res;
    if (mLastRequestWidth == -1 || mLastRequestHeight == -1) {
        uint32_t pixFmt = V4L2_PIX_FMT_YUV420;
        res = queryStart();
        if (res == NO_ERROR) {
            mLastRequestWidth = width;
            mLastRequestHeight = height;
            DDD("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
                    __FUNCTION__, (const char*) mDeviceName,
                    reinterpret_cast<const char*>(&pixFmt),
                    mWidth, mHeight);
            mState = ECDS_STARTED;
        } else {
            ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
                    __FUNCTION__, (const char*) mDeviceName,
                    reinterpret_cast<const char*>(&pixFmt),
                    mWidth, mHeight);
            return;
        }
    }
    if (width != stride) {
        ALOGW("%s: expected stride (%d) differs from actual stride (%d)",
              __FUNCTION__, width, stride);
    }

    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
    float exposureCompensation = 1.0f;
    const cb_handle_t* cb = cb_handle_t::from(*handle);
    LOG_ALWAYS_FATAL_IF(!cb, "Unexpected buffer handle");
    const uint64_t offset = cb->getMmapedOffset();
    queryFrame(width, height, V4L2_PIX_FMT_RGB32, offset,
               whiteBalance[0], whiteBalance[1], whiteBalance[2],
               exposureCompensation, timestamp);

    DDD("RGBA sensor image captured");
}

void CameraRotator::captureRGB(uint8_t *img, uint32_t width, uint32_t height,
        uint32_t stride, int64_t *timestamp) {
    (void)img; (void)width; (void)height; (void)stride; (void)timestamp;
    ALOGE("%s: Not implemented", __FUNCTION__);
}

void CameraRotator::captureYU12(uint8_t *img, uint32_t width, uint32_t height,
        uint32_t stride, int64_t *timestamp) {
    ATRACE_CALL();
    status_t res;
    if (width != (uint32_t)mLastRequestWidth ||
        height != (uint32_t)mLastRequestHeight) {
        ALOGI("%s: Dimensions for the current request (%dx%d) differ "
              "from the previous request (%dx%d). Restarting camera",
                __FUNCTION__, width, height, mLastRequestWidth,
                mLastRequestHeight);

        if (mLastRequestWidth != -1 || mLastRequestHeight != -1) {
            // We only need to stop the camera if this isn't the first request.
            // Stop the camera device.
            res = queryStop();
            if (res == NO_ERROR) {
                mState = ECDS_CONNECTED;
                DDD("%s: Qemu camera device '%s' is stopped",
                        __FUNCTION__, (const char*) mDeviceName);
            } else {
                ALOGE("%s: Unable to stop device '%s'",
                        __FUNCTION__, (const char*) mDeviceName);
            }
        }

        /*
         * The host camera always assumes V4L2_PIX_FMT_RGB32 as the preview
         * format, and takes the video format from the pixFmt parameter:
         * V4L2_PIX_FMT_NV12 under minigbm, V4L2_PIX_FMT_YUV420 otherwise.
         */
        uint32_t pixFmt = mIsMinigbm ? V4L2_PIX_FMT_NV12 : V4L2_PIX_FMT_YUV420;
        res = queryStart(pixFmt, width, height);
        if (res == NO_ERROR) {
            mLastRequestWidth = width;
            mLastRequestHeight = height;
            DDD("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
                    __FUNCTION__, (const char*) mDeviceName,
                    reinterpret_cast<const char*>(&pixFmt),
                    mWidth, mHeight);
            mState = ECDS_STARTED;
        } else {
            ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
                    __FUNCTION__, (const char*) mDeviceName,
                    reinterpret_cast<const char*>(&pixFmt),
                    mWidth, mHeight);
            return;
        }
    }
    if (width != stride) {
        ALOGW("%s: expected stride (%d) differs from actual stride (%d)",
              __FUNCTION__, width, stride);
    }

    // Calculate the buffer size for YUV420.
    size_t bufferSize = (width * height * 12) / 8;
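    // YUV420 stores 12 bits per pixel: a full-resolution 8-bit Y plane plus
    // quarter-resolution Cb and Cr planes, so e.g. 640x480 needs 460800 bytes.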
    // Apply no white balance or exposure compensation.
    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
    float exposureCompensation = 1.0f;
    // Read video frame from webcam.
    mRender.startDevice(width, height, HAL_PIXEL_FORMAT_YCbCr_420_888);
    queryFrame(img, nullptr, bufferSize, 0, whiteBalance[0],
            whiteBalance[1], whiteBalance[2],
            exposureCompensation, timestamp);

    DDD("YUV420 sensor image captured");
}

void CameraRotator::captureYU12(uint32_t width, uint32_t height, uint32_t stride,
        int64_t *timestamp, buffer_handle_t* handle) {
    ATRACE_CALL();
    status_t res;
    if (mLastRequestWidth == -1 || mLastRequestHeight == -1) {
        uint32_t pixFmt = V4L2_PIX_FMT_YUV420;
        res = queryStart();
        if (res == NO_ERROR) {
            mLastRequestWidth = width;
            mLastRequestHeight = height;
            DDD("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
                    __FUNCTION__, (const char*) mDeviceName,
                    reinterpret_cast<const char*>(&pixFmt),
                    mWidth, mHeight);
            mState = ECDS_STARTED;
        } else {
            ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
                    __FUNCTION__, (const char*) mDeviceName,
                    reinterpret_cast<const char*>(&pixFmt),
                    mWidth, mHeight);
            return;
        }
    }
    if (width != stride) {
        ALOGW("%s: expected stride (%d) differs from actual stride (%d)",
              __FUNCTION__, width, stride);
    }

    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
    float exposureCompensation = 1.0f;
    const cb_handle_t* cb = cb_handle_t::from(*handle);
    LOG_ALWAYS_FATAL_IF(!cb, "Unexpected buffer handle");
    const uint64_t offset = cb->getMmapedOffset();
    queryFrame(width, height, V4L2_PIX_FMT_YUV420, offset,
               whiteBalance[0], whiteBalance[1], whiteBalance[2],
               exposureCompensation, timestamp);
    DDD("YUV420 sensor image captured");
}

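// Host-pointer variant: fills vframe (video) through the renderer. Preview
// (pframe) capture is not implemented yet and currently only logs.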
status_t CameraRotator::queryFrame(void* vframe,
                        void* pframe,
                        size_t vframe_size,
                        size_t pframe_size,
                        float r_scale,
                        float g_scale,
                        float b_scale,
                        float exposure_comp,
                        int64_t* frame_time) {
    (void)vframe_size; (void)pframe_size;
    (void)r_scale; (void)g_scale; (void)b_scale; (void)exposure_comp;
    if (vframe) {
        DDD("capturing video frame");
        mRender.produceFrame(vframe, frame_time);
    } else if (pframe) {
        DDD("capturing preview frame");
    }
    return NO_ERROR;
}

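// Offset-based variant for the host-composed path: receives the guest
// buffer's mmap offset instead of a pointer. Currently a stub that only
// reports success.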
status_t CameraRotator::queryFrame(int width,
                        int height,
                        uint32_t pixel_format,
                        uint64_t offset,
                        float r_scale,
                        float g_scale,
                        float b_scale,
                        float exposure_comp,
                        int64_t* frame_time) {
    (void)width; (void)height; (void)pixel_format; (void)offset;
    (void)r_scale; (void)g_scale; (void)b_scale; (void)exposure_comp;
    (void)frame_time;
    return NO_ERROR;
}

status_t CameraRotator::queryStop() {
    return NO_ERROR;
}

status_t CameraRotator::queryStart() {
    return NO_ERROR;
}

status_t CameraRotator::queryStart(uint32_t fmt, int w, int h) {
    (void)fmt;
    (void)w;
    (void)h;
    return NO_ERROR;
}

}  // namespace android