1 /*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 /*
18 * Contains implementation of an abstract class EmulatedCameraDevice that defines
19 * functionality expected from an emulated physical camera device:
20 * - Obtaining and setting camera parameters
21 * - Capturing frames
22 * - Streaming video
23 * - etc.
24 */
25
26 #define LOG_NDEBUG 0
27 #define LOG_TAG "EmulatedCamera_Device"
28 #include <log/log.h>
29 #include <sys/select.h>
30 #include <cmath>
31 #include "Alignment.h"
32 #include "EmulatedCamera.h"
33 #include "EmulatedCameraDevice.h"
34
35 #undef min
36 #undef max
37 #include <algorithm>
38
39 namespace android {
40
/* Display gamma used by setExposureCompensation() to turn an EV step into
 * a linear luminance scale (2^(ev/gamma)). */
const float GAMMA_CORRECTION = 2.2f;
/* Constructs the abstract device in the ECDS_CONSTRUCTED state.
 * camera_hal - HAL instance that owns this device; used later for frame
 * delivery and auto-focus completion callbacks. Exposure compensation
 * starts at the neutral 1.0 scale and no white balance modes are
 * registered yet. */
EmulatedCameraDevice::EmulatedCameraDevice(EmulatedCamera* camera_hal)
    : mObjectLock(),
      mCameraHAL(camera_hal),
      mExposureCompensation(1.0f),
      mWhiteBalanceScale(NULL),
      mSupportedWhiteBalanceScale(),
      mState(ECDS_CONSTRUCTED),
      mTriggerAutoFocus(false)
{
}
52
~EmulatedCameraDevice()53 EmulatedCameraDevice::~EmulatedCameraDevice()
54 {
55 ALOGV("EmulatedCameraDevice destructor");
56 for (size_t i = 0; i < mSupportedWhiteBalanceScale.size(); ++i) {
57 if (mSupportedWhiteBalanceScale.valueAt(i) != NULL) {
58 delete[] mSupportedWhiteBalanceScale.valueAt(i);
59 }
60 }
61 }
62
63 /****************************************************************************
64 * Emulated camera device public API
65 ***************************************************************************/
66
Initialize()67 status_t EmulatedCameraDevice::Initialize()
68 {
69 if (isInitialized()) {
70 ALOGW("%s: Emulated camera device is already initialized: mState = %d",
71 __FUNCTION__, mState);
72 return NO_ERROR;
73 }
74
75 mState = ECDS_INITIALIZED;
76
77 return NO_ERROR;
78 }
79
startDeliveringFrames(bool one_burst)80 status_t EmulatedCameraDevice::startDeliveringFrames(bool one_burst)
81 {
82 ALOGV("%s", __FUNCTION__);
83
84 if (!isStarted()) {
85 ALOGE("%s: Device is not started", __FUNCTION__);
86 return EINVAL;
87 }
88
89 /* Frames will be delivered from the thread routine. */
90 const status_t res = startWorkerThread(one_burst);
91 ALOGE_IF(res != NO_ERROR, "%s: startWorkerThread failed", __FUNCTION__);
92 return res;
93 }
94
stopDeliveringFrames()95 status_t EmulatedCameraDevice::stopDeliveringFrames()
96 {
97 ALOGV("%s", __FUNCTION__);
98
99 if (!isStarted()) {
100 ALOGW("%s: Device is not started", __FUNCTION__);
101 return NO_ERROR;
102 }
103
104 const status_t res = stopWorkerThread();
105 ALOGE_IF(res != NO_ERROR, "%s: stopWorkerThread failed", __FUNCTION__);
106 return res;
107 }
108
setPreviewFrameRate(int framesPerSecond)109 status_t EmulatedCameraDevice::setPreviewFrameRate(int framesPerSecond) {
110 if (framesPerSecond <= 0) {
111 return EINVAL;
112 }
113 mFramesPerSecond = framesPerSecond;
114 return NO_ERROR;
115 }
116
setExposureCompensation(const float ev)117 void EmulatedCameraDevice::setExposureCompensation(const float ev) {
118 ALOGV("%s", __FUNCTION__);
119
120 if (!isStarted()) {
121 ALOGW("%s: Fake camera device is not started.", __FUNCTION__);
122 }
123
124 mExposureCompensation = std::pow(2.0f, ev / GAMMA_CORRECTION);
125 ALOGV("New exposure compensation is %f", mExposureCompensation);
126 }
127
initializeWhiteBalanceModes(const char * mode,const float r_scale,const float b_scale)128 void EmulatedCameraDevice::initializeWhiteBalanceModes(const char* mode,
129 const float r_scale,
130 const float b_scale) {
131 ALOGV("%s with %s, %f, %f", __FUNCTION__, mode, r_scale, b_scale);
132 float* value = new float[3];
133 value[0] = r_scale; value[1] = 1.0f; value[2] = b_scale;
134 mSupportedWhiteBalanceScale.add(String8(mode), value);
135 }
136
/* Selects the active white balance scales by mode name. The mode should
 * have been registered via initializeWhiteBalanceModes() first;
 * NOTE(review): looking up an unregistered mode appears to leave
 * mWhiteBalanceScale invalid, which changeWhiteBalance() would then
 * dereference — confirm callers only pass registered modes. */
void EmulatedCameraDevice::setWhiteBalanceMode(const char* mode) {
    ALOGV("%s with white balance %s", __FUNCTION__, mode);
    mWhiteBalanceScale =
            mSupportedWhiteBalanceScale.valueFor(String8(mode));
}
142
143 /* Computes the pixel value after adjusting the white balance to the current
144 * one. The input the y, u, v channel of the pixel and the adjusted value will
145 * be stored in place. The adjustment is done in RGB space.
146 */
changeWhiteBalance(uint8_t & y,uint8_t & u,uint8_t & v) const147 void EmulatedCameraDevice::changeWhiteBalance(uint8_t& y,
148 uint8_t& u,
149 uint8_t& v) const {
150 float r_scale = mWhiteBalanceScale[0];
151 float b_scale = mWhiteBalanceScale[2];
152 int r = static_cast<float>(YUV2R(y, u, v)) / r_scale;
153 int g = YUV2G(y, u, v);
154 int b = static_cast<float>(YUV2B(y, u, v)) / b_scale;
155
156 y = RGB2Y(r, g, b);
157 u = RGB2U(r, g, b);
158 v = RGB2V(r, g, b);
159 }
160
/* Fires a pending auto-focus completion, if one was armed by
 * setAutoFocus(). Runs on the camera worker thread once per frame
 * iteration; the atomic compare-exchange guarantees each request is
 * reported to the HAL exactly once. */
void EmulatedCameraDevice::checkAutoFocusTrigger() {
    // The expected value is a reference so we need it to be a variable
    bool expectedTrigger = true;
    if (mTriggerAutoFocus.compare_exchange_strong(expectedTrigger, false)) {
        // If the compare exchange returns true then the value was the expected
        // 'true' and was successfully set to 'false'. So that means it's time
        // to trigger an auto-focus event and that we have disabled that trigger
        // so it won't happen until another request is received.
        mCameraHAL->autoFocusComplete();
    }
}
172
/* Copies the current frame from |source| into |dest|, converting to
 * |pixelFormat| when it differs from the device's native format. The
 * only supported conversion is native YV12 (V4L2_PIX_FMT_YVU420) to
 * YUV420, which strips any stride alignment and swaps the chroma
 * planes. Any other mismatched pair returns EINVAL. */
status_t EmulatedCameraDevice::getCurrentFrameImpl(const uint8_t* source,
                                                   uint8_t* dest,
                                                   uint32_t pixelFormat) const {
    if (pixelFormat == mPixelFormat) {
        /* Same format: straight copy of the whole framebuffer. */
        memcpy(dest, source, mFrameBufferSize);
        return NO_ERROR;
    } else if (pixelFormat == V4L2_PIX_FMT_YUV420 &&
               mPixelFormat == V4L2_PIX_FMT_YVU420) {
        // Convert from YV12 to YUV420 without alignment
        const int ySize = mYStride * mFrameHeight;
        const int uvSize = mUVStride * (mFrameHeight / 2);
        if (mYStride == mFrameWidth) {
            // Copy Y straight up
            memcpy(dest, source, ySize);
        } else {
            // Strip alignment: copy row by row, dropping stride padding
            for (int y = 0; y < mFrameHeight; ++y) {
                memcpy(dest + y * mFrameWidth,
                       source + y * mYStride,
                       mFrameWidth);
            }
        }

        if (mUVStride == mFrameWidth / 2) {
            // Swap U and V: source's second chroma plane becomes dest's
            // first chroma plane, and vice versa
            memcpy(dest + ySize, source + ySize + uvSize, uvSize);
            memcpy(dest + ySize + uvSize, source + ySize, uvSize);
        } else {
            // Strip alignment
            uint8_t* uvDest = dest + mFrameWidth * mFrameHeight;
            // Start at source's second chroma plane; after the first
            // pass uvSource steps back one plane, achieving the U/V swap
            const uint8_t* uvSource = source + ySize + uvSize;

            for (int i = 0; i < 2; ++i) {
                for (int y = 0; y < mFrameHeight / 2; ++y) {
                    memcpy(uvDest + y * (mFrameWidth / 2),
                           uvSource + y * mUVStride,
                           mFrameWidth / 2);
                }
                uvDest += (mFrameHeight / 2) * (mFrameWidth / 2);
                uvSource -= uvSize;
            }
        }
        return NO_ERROR;
    }
    ALOGE("%s: Invalid pixel format conversion [%.4s to %.4s] requested",
          __FUNCTION__, reinterpret_cast<const char*>(&mPixelFormat),
          reinterpret_cast<const char*>(&pixelFormat));
    return EINVAL;
}
222
getCurrentFrame(void * buffer,uint32_t pixelFormat,int64_t * timestamp)223 status_t EmulatedCameraDevice::getCurrentFrame(void* buffer,
224 uint32_t pixelFormat,
225 int64_t* timestamp)
226 {
227 if (!isStarted()) {
228 ALOGE("%s: Device is not started", __FUNCTION__);
229 return EINVAL;
230 }
231 if (buffer == nullptr) {
232 ALOGE("%s: Invalid buffer provided", __FUNCTION__);
233 return EINVAL;
234 }
235
236 FrameLock lock(*this);
237 const void* source = mCameraThread->getPrimaryBuffer();
238 if (source == nullptr) {
239 ALOGE("%s: No framebuffer", __FUNCTION__);
240 return EINVAL;
241 }
242
243 if (timestamp != nullptr) {
244 *timestamp = mCameraThread->getPrimaryTimestamp();
245 }
246
247 return getCurrentFrameImpl(reinterpret_cast<const uint8_t*>(source),
248 reinterpret_cast<uint8_t*>(buffer),
249 pixelFormat);
250 }
251
getCurrentPreviewFrame(void * buffer,int64_t * timestamp)252 status_t EmulatedCameraDevice::getCurrentPreviewFrame(void* buffer,
253 int64_t* timestamp)
254 {
255 if (!isStarted()) {
256 ALOGE("%s: Device is not started", __FUNCTION__);
257 return EINVAL;
258 }
259 if (buffer == nullptr) {
260 ALOGE("%s: Invalid buffer provided", __FUNCTION__);
261 return EINVAL;
262 }
263
264 FrameLock lock(*this);
265 const void* currentFrame = mCameraThread->getPrimaryBuffer();
266 if (currentFrame == nullptr) {
267 ALOGE("%s: No framebuffer", __FUNCTION__);
268 return EINVAL;
269 }
270
271 if (timestamp != nullptr) {
272 *timestamp = mCameraThread->getPrimaryTimestamp();
273 }
274
275 /* In emulation the framebuffer is never RGB. */
276 switch (mPixelFormat) {
277 case V4L2_PIX_FMT_YVU420:
278 YV12ToRGB32(currentFrame, buffer, mFrameWidth, mFrameHeight);
279 return NO_ERROR;
280 case V4L2_PIX_FMT_YUV420:
281 YU12ToRGB32(currentFrame, buffer, mFrameWidth, mFrameHeight);
282 return NO_ERROR;
283 case V4L2_PIX_FMT_NV21:
284 NV21ToRGB32(currentFrame, buffer, mFrameWidth, mFrameHeight);
285 return NO_ERROR;
286 case V4L2_PIX_FMT_NV12:
287 NV12ToRGB32(currentFrame, buffer, mFrameWidth, mFrameHeight);
288 return NO_ERROR;
289
290 default:
291 ALOGE("%s: Unknown pixel format %.4s",
292 __FUNCTION__, reinterpret_cast<const char*>(&mPixelFormat));
293 return EINVAL;
294 }
295 }
296
getCurrentFrame()297 const void* EmulatedCameraDevice::getCurrentFrame() {
298 if (mCameraThread.get()) {
299 return mCameraThread->getPrimaryBuffer();
300 }
301 return nullptr;
302 }
303
/* RAII guard: acquires the device's current-frame lock so the frame
 * producer cannot swap buffers while the holder reads the primary one. */
EmulatedCameraDevice::FrameLock::FrameLock(EmulatedCameraDevice& cameraDevice)
    : mCameraDevice(cameraDevice) {
    mCameraDevice.lockCurrentFrame();
}
308
/* Releases the current-frame lock acquired in the constructor. */
EmulatedCameraDevice::FrameLock::~FrameLock() {
    mCameraDevice.unlockCurrentFrame();
}
312
/* Arms the auto-focus trigger; the worker thread reports completion on a
 * later iteration via checkAutoFocusTrigger(). Always succeeds. */
status_t EmulatedCameraDevice::setAutoFocus() {
    mTriggerAutoFocus = true;
    return NO_ERROR;
}
317
/* Disarms any pending auto-focus trigger. Always succeeds. */
status_t EmulatedCameraDevice::cancelAutoFocus() {
    mTriggerAutoFocus = false;
    return NO_ERROR;
}
322
requestRestart(int width,int height,uint32_t pixelFormat,bool takingPicture,bool oneBurst)323 bool EmulatedCameraDevice::requestRestart(int width, int height,
324 uint32_t pixelFormat,
325 bool takingPicture, bool oneBurst) {
326 if (mCameraThread.get() == nullptr) {
327 ALOGE("%s: No thread alive to perform the restart, is preview on?",
328 __FUNCTION__);
329 return false;
330 }
331 mCameraThread->requestRestart(width, height, pixelFormat,
332 takingPicture, oneBurst);
333 return true;
334 }
335
336 /****************************************************************************
337 * Emulated camera device private API
338 ***************************************************************************/
339
/* Validates the requested pixel format, computes the framebuffer
 * geometry (plane strides and total byte size), caches it, and
 * allocates the double frame buffers. Returns EINVAL for unsupported
 * formats. */
status_t EmulatedCameraDevice::commonStartDevice(int width,
                                                 int height,
                                                 uint32_t pix_fmt)
{
    /* Validate pixel format, and calculate framebuffer size at the same time. */
    switch (pix_fmt) {
        case V4L2_PIX_FMT_YVU420:
        case V4L2_PIX_FMT_YUV420:
            // For these pixel formats the strides have to be aligned to 16 byte
            // boundaries as per the format specification
            // https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12
            mYStride = align(width, 16);
            mUVStride = align(mYStride / 2, 16);
            // The second term should use half the height but since there are
            // two planes the multiplication with two cancels that out
            mFrameBufferSize = mYStride * height + mUVStride * height;
            break;
        case V4L2_PIX_FMT_NV21:
        case V4L2_PIX_FMT_NV12:
            mYStride = width;
            // Because of interleaving the UV stride is the same as the Y stride
            // since it covers two pixels, one U and one V.
            mUVStride = mYStride;
            // Since the U/V stride covers both U and V we don't multiply by two
            mFrameBufferSize = mYStride * height + mUVStride * (height / 2);
            break;
        default:
            ALOGE("%s: Unknown pixel format %.4s",
                  __FUNCTION__, reinterpret_cast<const char*>(&pix_fmt));
            return EINVAL;
    }

    /* Cache framebuffer info. */
    mFrameWidth = width;
    mFrameHeight = height;
    mPixelFormat = pix_fmt;
    mTotalPixels = width * height;

    /* Allocate framebuffer (double-buffered: producer fills one while
     * consumers read the other). */
    mFrameBuffers[0].resize(mFrameBufferSize);
    mFrameBuffers[1].resize(mFrameBufferSize);
    ALOGV("%s: Allocated %zu bytes for %d pixels in %.4s[%dx%d] frame",
          __FUNCTION__, mFrameBufferSize, mTotalPixels,
          reinterpret_cast<const char*>(&mPixelFormat), mFrameWidth, mFrameHeight);
    return NO_ERROR;
}
386
commonStopDevice()387 void EmulatedCameraDevice::commonStopDevice()
388 {
389 mFrameWidth = mFrameHeight = mTotalPixels = 0;
390 mPixelFormat = 0;
391
392 mFrameBuffers[0].clear();
393 mFrameBuffers[1].clear();
394 // No need to keep all that memory allocated if the camera isn't running
395 mFrameBuffers[0].shrink_to_fit();
396 mFrameBuffers[1].shrink_to_fit();
397 }
398
399 /****************************************************************************
400 * Worker thread management.
401 ***************************************************************************/
402
startWorkerThread(bool one_burst)403 status_t EmulatedCameraDevice::startWorkerThread(bool one_burst)
404 {
405 ALOGV("%s", __FUNCTION__);
406
407 if (!isInitialized()) {
408 ALOGE("%s: Emulated camera device is not initialized", __FUNCTION__);
409 return EINVAL;
410 }
411
412 mCameraThread = new CameraThread(this, staticProduceFrame, this);
413 if (mCameraThread == NULL) {
414 ALOGE("%s: Unable to instantiate CameraThread object", __FUNCTION__);
415 return ENOMEM;
416 }
417 status_t res = mCameraThread->startThread(one_burst);
418 if (res != NO_ERROR) {
419 ALOGE("%s: Unable to start CameraThread: %s",
420 __FUNCTION__, strerror(res));
421 return res;
422 }
423
424 return res;
425 }
426
stopWorkerThread()427 status_t EmulatedCameraDevice::stopWorkerThread()
428 {
429 ALOGV("%s", __FUNCTION__);
430
431 if (!isInitialized()) {
432 ALOGE("%s: Emulated camera device is not initialized", __FUNCTION__);
433 return EINVAL;
434 }
435
436 status_t res = mCameraThread->stopThread();
437 if (res != NO_ERROR) {
438 ALOGE("%s: Unable to stop CameraThread", __FUNCTION__);
439 return res;
440 }
441 res = mCameraThread->joinThread();
442 if (res != NO_ERROR) {
443 ALOGE("%s: Unable to join CameraThread", __FUNCTION__);
444 return res;
445 }
446
447 // Destroy the thread as well
448 mCameraThread.clear();
449 return res;
450 }
451
/* Creates the camera worker thread. |producer| and |producerOpaque| are
 * stored and later handed to the FrameProducer created in
 * onThreadStart(); all restart-request fields start out cleared. */
EmulatedCameraDevice::CameraThread::CameraThread(EmulatedCameraDevice* dev,
                                                 ProduceFrameFunc producer,
                                                 void* producerOpaque)
    : WorkerThread("Camera_CameraThread", dev, dev->mCameraHAL),
      mCurFrameTimestamp(0),
      mProducerFunc(producer),
      mProducerOpaque(producerOpaque),
      mRestartWidth(0),
      mRestartHeight(0),
      mRestartPixelFormat(0),
      mRestartOneBurst(false),
      mRestartTakingPicture(false),
      mRestartRequested(false) {

}
467
getPrimaryBuffer() const468 const void* EmulatedCameraDevice::CameraThread::getPrimaryBuffer() const {
469 if (mFrameProducer.get()) {
470 return mFrameProducer->getPrimaryBuffer();
471 }
472 return nullptr;
473 }
474
getPrimaryTimestamp() const475 int64_t EmulatedCameraDevice::CameraThread::getPrimaryTimestamp() const {
476 if (mFrameProducer.get()) {
477 return mFrameProducer->getPrimaryTimestamp();
478 }
479 return 0L;
480 }
481
/* Forwards to the frame producer's buffer lock. Assumes the producer
 * exists — callers reach this through FrameLock while frames are being
 * delivered. */
void EmulatedCameraDevice::CameraThread::lockPrimaryBuffer() {
    mFrameProducer->lockPrimaryBuffer();
}
485
/* Releases the frame producer's buffer lock taken by lockPrimaryBuffer(). */
void EmulatedCameraDevice::CameraThread::unlockPrimaryBuffer() {
    mFrameProducer->unlockPrimaryBuffer();
}
489
/* Sleeps up to |timeout| nanoseconds, then keeps polling (in 5 ms steps)
 * until the frame producer has produced at least one frame. Returns
 * false if the thread was asked to stop while waiting, true once a
 * frame is available. */
bool
EmulatedCameraDevice::CameraThread::waitForFrameOrTimeout(nsecs_t timeout) {
    // Keep waiting until the frame producer indicates that a frame is available
    // This does introduce some unnecessary latency to the first frame delivery
    // but avoids a lot of thread synchronization.
    do {
        // Wait on the running condition; timeout is in nanoseconds. After
        // every wakeup re-check mRunning so a stop request ends the wait
        // promptly instead of after the full timeout.
        Mutex::Autolock lock(mRunningMutex);
        mRunningCondition.waitRelative(mRunningMutex, timeout);
        if (!mRunning) {
            ALOGV("%s: CameraThread has been terminated.", __FUNCTION__);
            return false;
        }
        // Set a short timeout in case there is no frame available and we are
        // going to loop. This way we ensure a sleep but keep a decent latency
        timeout = milliseconds(5);
    } while (!mFrameProducer->hasFrame());

    return true;
}
511
/* One iteration of the camera thread loop: paces delivery to the
 * configured frame rate, applies any pending restart request, fires a
 * pending auto-focus completion, then notifies the HAL that the next
 * frame is available. Returning false terminates the loop. */
bool EmulatedCameraDevice::CameraThread::inWorkerThread() {
    /* Wait till FPS timeout expires, or thread exit message is received. */
    nsecs_t wakeAt =
        mCurFrameTimestamp + 1000000000.0 / mCameraDevice->mFramesPerSecond;
    nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
    /* Clamp to zero so an overdue frame doesn't produce a negative wait. */
    nsecs_t timeout = std::max<nsecs_t>(0, wakeAt - now);

    if (!waitForFrameOrTimeout(timeout)) {
        return false;
    }

    /* Check if a restart and potentially apply the requested changes */
    if (!checkRestartRequest()) {
        return false;
    }

    /* Check if an auto-focus event needs to be triggered */
    mCameraDevice->checkAutoFocusTrigger();

    mCurFrameTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
    mCameraHAL->onNextFrameAvailable(mCurFrameTimestamp, mCameraDevice);

    return true;
}
536
/* Thread-start hook: creates the frame producer over the device's two
 * frame buffers and starts it. Returns ENOMEM if the producer couldn't
 * be created, otherwise the producer's start status. */
status_t EmulatedCameraDevice::CameraThread::onThreadStart() {
    void* primaryBuffer = mCameraDevice->getPrimaryBuffer();
    void* secondaryBuffer = mCameraDevice->getSecondaryBuffer();
    mFrameProducer = new FrameProducer(mCameraDevice,
                                       mProducerFunc, mProducerOpaque,
                                       primaryBuffer, secondaryBuffer);
    if (mFrameProducer.get() == nullptr) {
        ALOGE("%s: Could not instantiate FrameProducer object", __FUNCTION__);
        return ENOMEM;
    }
    return mFrameProducer->startThread(mOneBurst);
}
549
onThreadExit()550 void EmulatedCameraDevice::CameraThread::onThreadExit() {
551 if (mFrameProducer.get()) {
552 if (mFrameProducer->stopThread() == NO_ERROR) {
553 mFrameProducer->joinThread();
554 mFrameProducer.clear();
555 }
556 }
557 }
558
/* Creates the frame producer. The primary buffer is the one consumers
 * read; the secondary buffer is the one being filled by |producer|. The
 * two are swapped in inWorkerThread() after each produced frame. */
EmulatedCameraDevice::CameraThread::FrameProducer::FrameProducer(
        EmulatedCameraDevice* dev,
        ProduceFrameFunc producer,
        void* opaque,
        void* primaryBuffer,
        void* secondaryBuffer)
    : WorkerThread("Camera_FrameProducer", dev, dev->mCameraHAL),
      mProducer(producer),
      mOpaque(opaque),
      mPrimaryBuffer(primaryBuffer),
      mSecondaryBuffer(secondaryBuffer),
      mPrimaryTimestamp(0L),
      mSecondaryTimestamp(0L),
      mLastFrame(0),
      mHasFrame(false) {

}
576
/* Returns the buffer holding the most recently completed frame. Callers
 * should hold the buffer lock while reading it. */
const void*
EmulatedCameraDevice::CameraThread::FrameProducer::getPrimaryBuffer() const {
    return mPrimaryBuffer;
}
581
/* Returns the capture timestamp of the frame in the primary buffer
 * (0 until the first frame has been published). */
int64_t
EmulatedCameraDevice::CameraThread::FrameProducer::getPrimaryTimestamp() const {
    return mPrimaryTimestamp;
}
586
/* Takes the buffer mutex, preventing the producer from swapping the
 * primary/secondary buffers while a consumer reads the primary one. */
void EmulatedCameraDevice::CameraThread::FrameProducer::lockPrimaryBuffer() {
    mBufferMutex.lock();
}
/* Releases the buffer mutex taken by lockPrimaryBuffer(). */
void EmulatedCameraDevice::CameraThread::FrameProducer::unlockPrimaryBuffer() {
    mBufferMutex.unlock();
}
593
/* Records a restart request under the request lock. Only the parameters
 * are stored here; the actual stop/reconfigure/start sequence runs on
 * the camera worker thread in checkRestartRequest(). */
void EmulatedCameraDevice::CameraThread::requestRestart(int width,
                                                        int height,
                                                        uint32_t pixelFormat,
                                                        bool takingPicture,
                                                        bool oneBurst) {
    Mutex::Autolock lock(mRequestMutex);
    mRestartWidth = width;
    mRestartHeight = height;
    mRestartPixelFormat = pixelFormat;
    mRestartTakingPicture = takingPicture;
    mRestartOneBurst = oneBurst;
    mRestartRequested = true;
}
607
/* Returns true once at least one frame has been produced and published
 * since the producer started. */
bool EmulatedCameraDevice::CameraThread::FrameProducer::hasFrame() const {
    return mHasFrame;
}
611
/* Applies a pending restart request, if any: stops and joins the frame
 * producer, stops the device, restarts it with the requested geometry
 * and format, then recreates the producer. On any failure it reports
 * CAMERA_ERROR_SERVER_DIED to the HAL and returns false, which ends the
 * camera thread loop. Returns true when there was nothing to do or the
 * restart succeeded. */
bool EmulatedCameraDevice::CameraThread::checkRestartRequest() {
    Mutex::Autolock lock(mRequestMutex);
    if (mRestartRequested) {
        mRestartRequested = false;
        /* Tear down the producer first so nothing writes into the frame
         * buffers while the device is reconfigured. */
        status_t res = mFrameProducer->stopThread();
        if (res != NO_ERROR) {
            ALOGE("%s: Could not stop frame producer thread", __FUNCTION__);
            mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
            return false;
        }
        res = mFrameProducer->joinThread();
        if (res != NO_ERROR) {
            ALOGE("%s: Could not join frame producer thread", __FUNCTION__);
            mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
            return false;
        }
        mFrameProducer.clear();
        res = mCameraDevice->stopDevice();
        if (res != NO_ERROR) {
            ALOGE("%s: Could not stop device", __FUNCTION__);
            mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
            return false;
        }
        res = mCameraDevice->startDevice(mRestartWidth,
                                         mRestartHeight,
                                         mRestartPixelFormat);
        if (res != NO_ERROR) {
            ALOGE("%s: Could not start device", __FUNCTION__);
            mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
            return false;
        }
        if (mRestartTakingPicture) {
            mCameraHAL->setTakingPicture(true);
        }
        mOneBurst = mRestartOneBurst;

        // Pretend like this a thread start, performs the remaining setup
        if (onThreadStart() != NO_ERROR) {
            mCameraDevice->stopDevice();
            mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
            return false;
        }

        // Now wait for the frame producer to start producing before we proceed
        return waitForFrameOrTimeout(0);
    }
    return true;
}
660
/* One iteration of the producer loop: sleep until the next frame is due
 * (per the device's frame rate), produce a frame into the secondary
 * buffer via the producer callback, then publish it by swapping the
 * primary/secondary buffers under the buffer lock. Returning false
 * terminates the loop. */
bool EmulatedCameraDevice::CameraThread::FrameProducer::inWorkerThread() {
    /* Pace production to the configured frame rate. */
    nsecs_t nextFrame =
        mLastFrame + 1000000000 / mCameraDevice->mFramesPerSecond;
    nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
    nsecs_t timeout = std::max<nsecs_t>(0, nextFrame - now);

    {
        /* Sleep on the running condition; re-check mRunning after the
         * wakeup so a stop request ends the thread promptly. */
        Mutex::Autolock lock(mRunningMutex);
        mRunningCondition.waitRelative(mRunningMutex, timeout);
        if (!mRunning) {
            ALOGV("%s: FrameProducer has been terminated.", __FUNCTION__);
            return false;
        }
    }

    // Produce one frame and place it in the secondary buffer
    mLastFrame = systemTime(SYSTEM_TIME_MONOTONIC);
    if (!mProducer(mOpaque, mSecondaryBuffer, &mSecondaryTimestamp)) {
        ALOGE("FrameProducer could not produce frame, exiting thread");
        mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
        return false;
    }

    {
        // Switch buffers now that the secondary buffer is ready
        Mutex::Autolock lock(mBufferMutex);
        std::swap(mPrimaryBuffer, mSecondaryBuffer);
        std::swap(mPrimaryTimestamp, mSecondaryTimestamp);
    }
    mHasFrame = true;
    return true;
}
693
/* Locks the current (primary) frame buffer; used by FrameLock. Assumes
 * the camera thread exists, i.e. frame delivery has been started. */
void EmulatedCameraDevice::lockCurrentFrame() {
    mCameraThread->lockPrimaryBuffer();
}
697
/* Releases the current-frame lock taken by lockCurrentFrame(). */
void EmulatedCameraDevice::unlockCurrentFrame() {
    mCameraThread->unlockPrimaryBuffer();
}
701
702 }; /* namespace android */
703