1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 /*
18 * Contains implementation of a class EmulatedFakeCamera2 that encapsulates
19 * functionality of an advanced fake camera.
20 */
21
22 #include <inttypes.h>
23
24 //#define LOG_NDEBUG 0
25 #define LOG_TAG "EmulatedCamera_FakeCamera2"
26 #include <log/log.h>
27
28 #include "EmulatedFakeCamera2.h"
29 #include "EmulatedCameraFactory.h"
30 #include "GrallocModule.h"
31
32 #define ERROR_CAMERA_NOT_PRESENT (-EPIPE)
33
34 #define CAMERA2_EXT_TRIGGER_TESTING_DISCONNECT 0xFFFFFFFF
35
36 namespace android {
37
// Time conversion constants, expressed in nanoseconds (camera2 exposure
// times and frame durations are in ns).
const int64_t USEC = 1000LL;
const int64_t MSEC = USEC * 1000LL;
const int64_t SEC = MSEC * 1000LL;

// Pixel formats this fake HAL can produce; checked in allocateStream().
// YV12 is currently disabled (commented out).
const uint32_t EmulatedFakeCamera2::kAvailableFormats[4] = {
    HAL_PIXEL_FORMAT_RAW16,
    HAL_PIXEL_FORMAT_BLOB,
    HAL_PIXEL_FORMAT_RGBA_8888,
    //        HAL_PIXEL_FORMAT_YV12,
    HAL_PIXEL_FORMAT_YCrCb_420_SP
};

// All size tables below are flat lists of (width, height) pairs.
const uint32_t EmulatedFakeCamera2::kAvailableRawSizes[2] = {
    640, 480
    //    mSensorWidth, mSensorHeight
};

// Minimum frame duration (ns) for raw output, from the sensor's lower bound.
const uint64_t EmulatedFakeCamera2::kAvailableRawMinDurations[1] = {
    static_cast<uint64_t>(Sensor::kFrameDurationRange[0])
};

// Processed (YUV/RGB) output sizes for the back-facing camera.
const uint32_t EmulatedFakeCamera2::kAvailableProcessedSizesBack[4] = {
    640, 480, 320, 240
    //    mSensorWidth, mSensorHeight
};

// Processed (YUV/RGB) output sizes for the front-facing camera.
const uint32_t EmulatedFakeCamera2::kAvailableProcessedSizesFront[4] = {
    320, 240, 160, 120
    //    mSensorWidth, mSensorHeight
};

// Minimum frame duration (ns) for processed output.
const uint64_t EmulatedFakeCamera2::kAvailableProcessedMinDurations[1] = {
    static_cast<uint64_t>(Sensor::kFrameDurationRange[0])
};

// JPEG (BLOB) output sizes, back-facing camera.
const uint32_t EmulatedFakeCamera2::kAvailableJpegSizesBack[2] = {
    640, 480
    //    mSensorWidth, mSensorHeight
};

// JPEG (BLOB) output sizes, front-facing camera.
const uint32_t EmulatedFakeCamera2::kAvailableJpegSizesFront[2] = {
    320, 240
    //    mSensorWidth, mSensorHeight
};

// Minimum frame duration (ns) for JPEG output.
const uint64_t EmulatedFakeCamera2::kAvailableJpegMinDurations[1] = {
    static_cast<uint64_t>(Sensor::kFrameDurationRange[0])
};
88
/*
 * Constructs the fake camera device.
 *  cameraId   - HAL camera ID, forwarded to the EmulatedCamera2 base.
 *  facingBack - selects back- vs front-facing behavior (the two use
 *               different processed/JPEG size tables above).
 *  module     - HAL module this device belongs to.
 */
EmulatedFakeCamera2::EmulatedFakeCamera2(int cameraId,
        bool facingBack,
        struct hw_module_t* module)
        : EmulatedCamera2(cameraId,module),
          mFacingBack(facingBack),
          mIsConnected(false)
{
    ALOGD("Constructing emulated fake camera 2 facing %s",
            facingBack ? "back" : "front");
}
99
~EmulatedFakeCamera2()100 EmulatedFakeCamera2::~EmulatedFakeCamera2() {
101 if (mCameraInfo != NULL) {
102 free_camera_metadata(mCameraInfo);
103 }
104 }
105
106 /****************************************************************************
107 * Public API overrides
108 ***************************************************************************/
109
Initialize()110 status_t EmulatedFakeCamera2::Initialize() {
111 status_t res;
112
113 // Find max width/height
114 int32_t width = 0, height = 0;
115 size_t rawSizeCount = sizeof(kAvailableRawSizes)/sizeof(kAvailableRawSizes[0]);
116 for (size_t index = 0; index + 1 < rawSizeCount; index += 2) {
117 if (width <= (int32_t)kAvailableRawSizes[index] &&
118 height <= (int32_t)kAvailableRawSizes[index+1]) {
119 width = kAvailableRawSizes[index];
120 height = kAvailableRawSizes[index+1];
121 }
122 }
123
124 if (width < 640 || height < 480) {
125 width = 640;
126 height = 480;
127 }
128 mSensorWidth = width;
129 mSensorHeight = height;
130
131 res = constructStaticInfo(&mCameraInfo, true);
132 if (res != OK) {
133 ALOGE("%s: Unable to allocate static info: %s (%d)",
134 __FUNCTION__, strerror(-res), res);
135 return res;
136 }
137 res = constructStaticInfo(&mCameraInfo, false);
138 if (res != OK) {
139 ALOGE("%s: Unable to fill in static info: %s (%d)",
140 __FUNCTION__, strerror(-res), res);
141 return res;
142 }
143 if (res != OK) return res;
144
145 mNextStreamId = 1;
146 mNextReprocessStreamId = 1;
147 mRawStreamCount = 0;
148 mProcessedStreamCount = 0;
149 mJpegStreamCount = 0;
150 mReprocessStreamCount = 0;
151
152 return NO_ERROR;
153 }
154
155 /****************************************************************************
156 * Camera module API overrides
157 ***************************************************************************/
158
/*
 * Opens the camera device: creates the pipeline threads (configure,
 * readout, control), the simulated sensor and the JPEG compressor, starts
 * them all, then delegates device-handle setup to EmulatedCamera2.
 * Fails with -ENODEV if the camera is currently simulated as unplugged.
 */
status_t EmulatedFakeCamera2::connectCamera(hw_device_t** device) {
    status_t res;
    ALOGV("%s", __FUNCTION__);

    {
        // Scope the lock: only the hotplug-status check needs mMutex.
        Mutex::Autolock l(mMutex);
        if (!mStatusPresent) {
            ALOGE("%s: Camera ID %d is unplugged", __FUNCTION__,
                    mCameraID);
            return -ENODEV;
        }
    }

    mConfigureThread = new ConfigureThread(this);
    mReadoutThread = new ReadoutThread(this);
    mControlThread = new ControlThread(this);
    mSensor = new Sensor(mSensorWidth, mSensorHeight);
    mJpegCompressor = new JpegCompressor();

    // Stream IDs restart from 1 on each connect.
    mNextStreamId = 1;
    mNextReprocessStreamId = 1;

    // Start the sensor first, then the three pipeline threads; bail out on
    // the first failure.
    res = mSensor->startUp();
    if (res != NO_ERROR) return res;

    res = mConfigureThread->run("EmulatedFakeCamera2::configureThread");
    if (res != NO_ERROR) return res;

    res = mReadoutThread->run("EmulatedFakeCamera2::readoutThread");
    if (res != NO_ERROR) return res;

    res = mControlThread->run("EmulatedFakeCamera2::controlThread");
    if (res != NO_ERROR) return res;

    status_t ret = EmulatedCamera2::connectCamera(device);

    // Only mark connected when the base class accepted the open.
    if (ret >= 0) {
        mIsConnected = true;
    }

    return ret;
}
201
plugCamera()202 status_t EmulatedFakeCamera2::plugCamera() {
203 {
204 Mutex::Autolock l(mMutex);
205
206 if (!mStatusPresent) {
207 ALOGI("%s: Plugged back in", __FUNCTION__);
208 mStatusPresent = true;
209 }
210 }
211
212 return NO_ERROR;
213 }
214
unplugCamera()215 status_t EmulatedFakeCamera2::unplugCamera() {
216 {
217 Mutex::Autolock l(mMutex);
218
219 if (mStatusPresent) {
220 ALOGI("%s: Unplugged camera", __FUNCTION__);
221 mStatusPresent = false;
222 }
223 }
224
225 return closeCamera();
226 }
227
getHotplugStatus()228 camera_device_status_t EmulatedFakeCamera2::getHotplugStatus() {
229 Mutex::Autolock l(mMutex);
230 return mStatusPresent ?
231 CAMERA_DEVICE_STATUS_PRESENT :
232 CAMERA_DEVICE_STATUS_NOT_PRESENT;
233 }
234
235
236
/*
 * Shuts down the device: stops the sensor, asks every pipeline thread to
 * exit, then joins them outside the lock. No-op if never connected.
 */
status_t EmulatedFakeCamera2::closeCamera() {
    {
        Mutex::Autolock l(mMutex);

        status_t res;
        ALOGV("%s", __FUNCTION__);

        if (!mIsConnected) {
            return NO_ERROR;
        }

        res = mSensor->shutDown();
        if (res != NO_ERROR) {
            ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
            return res;
        }

        // Request (but don't wait for) thread exit while still holding the
        // lock; also abort any in-flight JPEG compression.
        mConfigureThread->requestExit();
        mReadoutThread->requestExit();
        mControlThread->requestExit();
        mJpegCompressor->cancel();
    }

    // give up the lock since we will now block and the threads
    // can call back into this object
    mConfigureThread->join();
    mReadoutThread->join();
    mControlThread->join();

    ALOGV("%s exit", __FUNCTION__);

    {
        Mutex::Autolock l(mMutex);
        mIsConnected = false;
    }

    return NO_ERROR;
}
275
getCameraInfo(struct camera_info * info)276 status_t EmulatedFakeCamera2::getCameraInfo(struct camera_info *info) {
277 info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
278 info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation();
279 return EmulatedCamera2::getCameraInfo(info);
280 }
281
282 /****************************************************************************
283 * Camera device API overrides
284 ***************************************************************************/
285
286 /** Request input queue */
287
requestQueueNotify()288 int EmulatedFakeCamera2::requestQueueNotify() {
289 ALOGV("Request queue notification received");
290
291 ALOG_ASSERT(mRequestQueueSrc != NULL,
292 "%s: Request queue src not set, but received queue notification!",
293 __FUNCTION__);
294 ALOG_ASSERT(mFrameQueueDst != NULL,
295 "%s: Request queue src not set, but received queue notification!",
296 __FUNCTION__);
297 ALOG_ASSERT(mStreams.size() != 0,
298 "%s: No streams allocated, but received queue notification!",
299 __FUNCTION__);
300 return mConfigureThread->newRequestAvailable();
301 }
302
getInProgressCount()303 int EmulatedFakeCamera2::getInProgressCount() {
304 Mutex::Autolock l(mMutex);
305
306 if (!mStatusPresent) {
307 ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
308 return ERROR_CAMERA_NOT_PRESENT;
309 }
310
311 int requestCount = 0;
312 requestCount += mConfigureThread->getInProgressCount();
313 requestCount += mReadoutThread->getInProgressCount();
314 requestCount += mJpegCompressor->isBusy() ? 1 : 0;
315
316 return requestCount;
317 }
318
constructDefaultRequest(int request_template,camera_metadata_t ** request)319 int EmulatedFakeCamera2::constructDefaultRequest(
320 int request_template,
321 camera_metadata_t **request) {
322
323 if (request == NULL) return BAD_VALUE;
324 if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
325 return BAD_VALUE;
326 }
327
328 {
329 Mutex::Autolock l(mMutex);
330 if (!mStatusPresent) {
331 ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
332 return ERROR_CAMERA_NOT_PRESENT;
333 }
334 }
335
336 status_t res;
337 // Pass 1, calculate size and allocate
338 res = constructDefaultRequest(request_template,
339 request,
340 true);
341 if (res != OK) {
342 return res;
343 }
344 // Pass 2, build request
345 res = constructDefaultRequest(request_template,
346 request,
347 false);
348 if (res != OK) {
349 ALOGE("Unable to populate new request for template %d",
350 request_template);
351 }
352
353 return res;
354 }
355
allocateStream(uint32_t width,uint32_t height,int format,const camera2_stream_ops_t * stream_ops,uint32_t * stream_id,uint32_t * format_actual,uint32_t * usage,uint32_t * max_buffers)356 int EmulatedFakeCamera2::allocateStream(
357 uint32_t width,
358 uint32_t height,
359 int format,
360 const camera2_stream_ops_t *stream_ops,
361 uint32_t *stream_id,
362 uint32_t *format_actual,
363 uint32_t *usage,
364 uint32_t *max_buffers) {
365 Mutex::Autolock l(mMutex);
366
367 if (!mStatusPresent) {
368 ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
369 return ERROR_CAMERA_NOT_PRESENT;
370 }
371
372 // Temporary shim until FORMAT_ZSL is removed
373 if (format == CAMERA2_HAL_PIXEL_FORMAT_ZSL) {
374 format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
375 }
376
377 if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
378 unsigned int numFormats = sizeof(kAvailableFormats) / sizeof(uint32_t);
379 unsigned int formatIdx = 0;
380 for (; formatIdx < numFormats; formatIdx++) {
381 if (format == (int)kAvailableFormats[formatIdx]) break;
382 }
383 if (formatIdx == numFormats) {
384 ALOGE("%s: Format 0x%x is not supported", __FUNCTION__, format);
385 return BAD_VALUE;
386 }
387 }
388
389 const uint32_t *availableSizes;
390 size_t availableSizeCount;
391 switch (format) {
392 case HAL_PIXEL_FORMAT_RAW16:
393 availableSizes = kAvailableRawSizes;
394 availableSizeCount = sizeof(kAvailableRawSizes)/sizeof(uint32_t);
395 break;
396 case HAL_PIXEL_FORMAT_BLOB:
397 availableSizes = mFacingBack ?
398 kAvailableJpegSizesBack : kAvailableJpegSizesFront;
399 availableSizeCount = mFacingBack ?
400 sizeof(kAvailableJpegSizesBack)/sizeof(uint32_t) :
401 sizeof(kAvailableJpegSizesFront)/sizeof(uint32_t);
402 break;
403 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
404 case HAL_PIXEL_FORMAT_RGBA_8888:
405 case HAL_PIXEL_FORMAT_YV12:
406 case HAL_PIXEL_FORMAT_YCrCb_420_SP:
407 availableSizes = mFacingBack ?
408 kAvailableProcessedSizesBack : kAvailableProcessedSizesFront;
409 availableSizeCount = mFacingBack ?
410 sizeof(kAvailableProcessedSizesBack)/sizeof(uint32_t) :
411 sizeof(kAvailableProcessedSizesFront)/sizeof(uint32_t);
412 break;
413 default:
414 ALOGE("%s: Unknown format 0x%x", __FUNCTION__, format);
415 return BAD_VALUE;
416 }
417
418 unsigned int resIdx = 0;
419 for (; resIdx < availableSizeCount; resIdx++) {
420 if (availableSizes[resIdx * 2] == width &&
421 availableSizes[resIdx * 2 + 1] == height) break;
422 }
423 if (resIdx == availableSizeCount) {
424 ALOGE("%s: Format 0x%x does not support resolution %d, %d", __FUNCTION__,
425 format, width, height);
426 return BAD_VALUE;
427 }
428
429 switch (format) {
430 case HAL_PIXEL_FORMAT_RAW16:
431 if (mRawStreamCount >= kMaxRawStreamCount) {
432 ALOGE("%s: Cannot allocate another raw stream (%d already allocated)",
433 __FUNCTION__, mRawStreamCount);
434 return INVALID_OPERATION;
435 }
436 mRawStreamCount++;
437 break;
438 case HAL_PIXEL_FORMAT_BLOB:
439 if (mJpegStreamCount >= kMaxJpegStreamCount) {
440 ALOGE("%s: Cannot allocate another JPEG stream (%d already allocated)",
441 __FUNCTION__, mJpegStreamCount);
442 return INVALID_OPERATION;
443 }
444 mJpegStreamCount++;
445 break;
446 default:
447 if (mProcessedStreamCount >= kMaxProcessedStreamCount) {
448 ALOGE("%s: Cannot allocate another processed stream (%d already allocated)",
449 __FUNCTION__, mProcessedStreamCount);
450 return INVALID_OPERATION;
451 }
452 mProcessedStreamCount++;
453 }
454
455 Stream newStream;
456 newStream.ops = stream_ops;
457 newStream.width = width;
458 newStream.height = height;
459 newStream.format = format;
460 // TODO: Query stride from gralloc
461 newStream.stride = width;
462
463 mStreams.add(mNextStreamId, newStream);
464
465 *stream_id = mNextStreamId;
466 if (format_actual) *format_actual = format;
467 *usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
468 *max_buffers = kMaxBufferCount;
469
470 ALOGV("Stream allocated: %d, %d x %d, 0x%x. U: %x, B: %d",
471 *stream_id, width, height, format, *usage, *max_buffers);
472
473 mNextStreamId++;
474 return NO_ERROR;
475 }
476
registerStreamBuffers(uint32_t stream_id,int num_buffers,buffer_handle_t * buffers)477 int EmulatedFakeCamera2::registerStreamBuffers(
478 uint32_t stream_id,
479 int num_buffers,
480 buffer_handle_t *buffers) {
481 Mutex::Autolock l(mMutex);
482
483 if (!mStatusPresent) {
484 ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
485 return ERROR_CAMERA_NOT_PRESENT;
486 }
487
488 ALOGV("%s: Stream %d registering %d buffers", __FUNCTION__,
489 stream_id, num_buffers);
490 // Need to find out what the final concrete pixel format for our stream is
491 // Assumes that all buffers have the same format.
492 if (num_buffers < 1) {
493 ALOGE("%s: Stream %d only has %d buffers!",
494 __FUNCTION__, stream_id, num_buffers);
495 return BAD_VALUE;
496 }
497
498 ssize_t streamIndex = mStreams.indexOfKey(stream_id);
499 if (streamIndex < 0) {
500 ALOGE("%s: Unknown stream id %d!", __FUNCTION__, stream_id);
501 return BAD_VALUE;
502 }
503
504 Stream &stream = mStreams.editValueAt(streamIndex);
505
506 int finalFormat = stream.format;
507
508 if (finalFormat == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
509 finalFormat = HAL_PIXEL_FORMAT_RGBA_8888;
510 }
511
512 ALOGV("%s: Stream %d format set to %x, previously %x",
513 __FUNCTION__, stream_id, finalFormat, stream.format);
514
515 stream.format = finalFormat;
516
517 return NO_ERROR;
518 }
519
releaseStream(uint32_t stream_id)520 int EmulatedFakeCamera2::releaseStream(uint32_t stream_id) {
521 Mutex::Autolock l(mMutex);
522
523 ssize_t streamIndex = mStreams.indexOfKey(stream_id);
524 if (streamIndex < 0) {
525 ALOGE("%s: Unknown stream id %d!", __FUNCTION__, stream_id);
526 return BAD_VALUE;
527 }
528
529 if (isStreamInUse(stream_id)) {
530 ALOGE("%s: Cannot release stream %d; in use!", __FUNCTION__,
531 stream_id);
532 return BAD_VALUE;
533 }
534
535 switch(mStreams.valueAt(streamIndex).format) {
536 case HAL_PIXEL_FORMAT_RAW16:
537 mRawStreamCount--;
538 break;
539 case HAL_PIXEL_FORMAT_BLOB:
540 mJpegStreamCount--;
541 break;
542 default:
543 mProcessedStreamCount--;
544 break;
545 }
546
547 mStreams.removeItemsAt(streamIndex);
548
549 return NO_ERROR;
550 }
551
/*
 * Allocates a reprocess (input) stream that mirrors an existing output
 * stream: same dimensions, format and stride. Enforces the reprocess
 * stream-count limit and returns the new reprocess stream id.
 */
int EmulatedFakeCamera2::allocateReprocessStreamFromStream(
        uint32_t output_stream_id,
        const camera2_stream_in_ops_t *stream_ops,
        uint32_t *stream_id) {
    Mutex::Autolock l(mMutex);

    if (!mStatusPresent) {
        ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
        return ERROR_CAMERA_NOT_PRESENT;
    }

    // The source output stream must already exist.
    ssize_t baseStreamIndex = mStreams.indexOfKey(output_stream_id);
    if (baseStreamIndex < 0) {
        ALOGE("%s: Unknown output stream id %d!", __FUNCTION__, output_stream_id);
        return BAD_VALUE;
    }

    const Stream &baseStream = mStreams[baseStreamIndex];

    // We'll reprocess anything we produced

    if (mReprocessStreamCount >= kMaxReprocessStreamCount) {
        ALOGE("%s: Cannot allocate another reprocess stream (%d already allocated)",
                __FUNCTION__, mReprocessStreamCount);
        return INVALID_OPERATION;
    }
    mReprocessStreamCount++;

    // Clone the base stream's geometry; remember where the data comes from.
    ReprocessStream newStream;
    newStream.ops = stream_ops;
    newStream.width = baseStream.width;
    newStream.height = baseStream.height;
    newStream.format = baseStream.format;
    newStream.stride = baseStream.stride;
    newStream.sourceStreamId = output_stream_id;

    *stream_id = mNextReprocessStreamId;
    mReprocessStreams.add(mNextReprocessStreamId, newStream);

    ALOGV("Reprocess stream allocated: %d: %d, %d, 0x%x. Parent stream: %d",
            *stream_id, newStream.width, newStream.height, newStream.format,
            output_stream_id);

    mNextReprocessStreamId++;
    return NO_ERROR;
}
598
releaseReprocessStream(uint32_t stream_id)599 int EmulatedFakeCamera2::releaseReprocessStream(uint32_t stream_id) {
600 Mutex::Autolock l(mMutex);
601
602 ssize_t streamIndex = mReprocessStreams.indexOfKey(stream_id);
603 if (streamIndex < 0) {
604 ALOGE("%s: Unknown reprocess stream id %d!", __FUNCTION__, stream_id);
605 return BAD_VALUE;
606 }
607
608 if (isReprocessStreamInUse(stream_id)) {
609 ALOGE("%s: Cannot release reprocessing stream %d; in use!", __FUNCTION__,
610 stream_id);
611 return BAD_VALUE;
612 }
613
614 mReprocessStreamCount--;
615 mReprocessStreams.removeItemsAt(streamIndex);
616
617 return NO_ERROR;
618 }
619
/*
 * Handles asynchronous trigger commands from the framework. The special
 * CAMERA2_EXT_TRIGGER_TESTING_DISCONNECT trigger simulates a physical
 * unplug (for testing) and notifies the factory; every other trigger is
 * forwarded to the control thread.
 */
int EmulatedFakeCamera2::triggerAction(uint32_t trigger_id,
        int32_t ext1,
        int32_t ext2) {
    Mutex::Autolock l(mMutex);

    if (trigger_id == CAMERA2_EXT_TRIGGER_TESTING_DISCONNECT) {
        ALOGI("%s: Disconnect trigger - camera must be closed", __FUNCTION__);
        mStatusPresent = false;

        gEmulatedCameraFactory.onStatusChanged(
                mCameraID,
                CAMERA_DEVICE_STATUS_NOT_PRESENT);
    }

    // Note: this also rejects the disconnect trigger itself, since the
    // branch above just cleared mStatusPresent.
    if (!mStatusPresent) {
        ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
        return ERROR_CAMERA_NOT_PRESENT;
    }

    return mControlThread->triggerAction(trigger_id,
            ext1, ext2);
}
642
643 /** Shutdown and debug methods */
644
/*
 * Debug dump: writes a summary of all allocated streams to the given file
 * descriptor (built in memory first, then emitted with one write()).
 */
int EmulatedFakeCamera2::dump(int fd) {
    String8 result;

    result.appendFormat(" Camera HAL device: EmulatedFakeCamera2\n");
    result.appendFormat(" Streams:\n");
    for (size_t i = 0; i < mStreams.size(); i++) {
        int id = mStreams.keyAt(i);
        const Stream& s = mStreams.valueAt(i);
        result.appendFormat(
            " Stream %d: %d x %d, format 0x%x, stride %d\n",
            id, s.width, s.height, s.format, s.stride);
    }

    write(fd, result.string(), result.size());

    return NO_ERROR;
}
662
/*
 * Called by the worker threads on unrecoverable pipeline errors.
 * Currently log-only; see TODO.
 */
void EmulatedFakeCamera2::signalError() {
    // TODO: Let parent know so we can shut down cleanly
    ALOGE("Worker thread is signaling a serious error");
}
667
668 /** Pipeline control worker thread methods */
669
/*
 * Configure-stage worker thread. Pulls requests off the request queue,
 * sets up sensor parameters and destination buffers, and hands work to the
 * readout thread. Thread(false): standard libutils non-Java thread flag.
 */
EmulatedFakeCamera2::ConfigureThread::ConfigureThread(EmulatedFakeCamera2 *parent):
        Thread(false),
        mParent(parent),
        mRequestCount(0),
        mNextBuffers(NULL) {
    mRunning = false;
}
677
// Nothing to clean up here: mNextBuffers is handed off to the readout
// thread in configureNextCapture/Reprocess() before being reset.
EmulatedFakeCamera2::ConfigureThread::~ConfigureThread() {
}
680
/*
 * Thread bootstrap, run once before the first threadLoop() iteration:
 * resets per-run state and signals waitUntilRunning() that the thread is
 * live.
 */
status_t EmulatedFakeCamera2::ConfigureThread::readyToRun() {
    Mutex::Autolock lock(mInputMutex);

    ALOGV("Starting up ConfigureThread");
    mRequest = NULL;
    mActive  = false;
    mRunning = true;

    mInputSignal.signal();
    return NO_ERROR;
}
692
waitUntilRunning()693 status_t EmulatedFakeCamera2::ConfigureThread::waitUntilRunning() {
694 Mutex::Autolock lock(mInputMutex);
695 if (!mRunning) {
696 ALOGV("Waiting for configure thread to start");
697 mInputSignal.wait(mInputMutex);
698 }
699 return OK;
700 }
701
newRequestAvailable()702 status_t EmulatedFakeCamera2::ConfigureThread::newRequestAvailable() {
703 waitUntilRunning();
704
705 Mutex::Autolock lock(mInputMutex);
706
707 mActive = true;
708 mInputSignal.signal();
709
710 return OK;
711 }
712
isStreamInUse(uint32_t id)713 bool EmulatedFakeCamera2::ConfigureThread::isStreamInUse(uint32_t id) {
714 Mutex::Autolock lock(mInternalsMutex);
715
716 if (mNextBuffers == NULL) return false;
717 for (size_t i=0; i < mNextBuffers->size(); i++) {
718 if ((*mNextBuffers)[i].streamId == (int)id) return true;
719 }
720 return false;
721 }
722
/*
 * Number of requests accepted by this thread but not yet handed off
 * (incremented in threadLoop(), decremented in configureNext*()).
 */
int EmulatedFakeCamera2::ConfigureThread::getInProgressCount() {
    Mutex::Autolock lock(mInputMutex);
    return mRequestCount;
}
727
threadLoop()728 bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
729 status_t res;
730
731 // Check if we're currently processing or just waiting
732 {
733 Mutex::Autolock lock(mInputMutex);
734 if (!mActive) {
735 // Inactive, keep waiting until we've been signaled
736 status_t res;
737 res = mInputSignal.waitRelative(mInputMutex, kWaitPerLoop);
738 if (res != NO_ERROR && res != TIMED_OUT) {
739 ALOGE("%s: Error waiting for input requests: %d",
740 __FUNCTION__, res);
741 return false;
742 }
743 if (!mActive) return true;
744 ALOGV("New request available");
745 }
746 // Active
747 }
748
749 if (mRequest == NULL) {
750 Mutex::Autolock il(mInternalsMutex);
751
752 ALOGV("Configure: Getting next request");
753 res = mParent->mRequestQueueSrc->dequeue_request(
754 mParent->mRequestQueueSrc,
755 &mRequest);
756 if (res != NO_ERROR) {
757 ALOGE("%s: Error dequeuing next request: %d", __FUNCTION__, res);
758 mParent->signalError();
759 return false;
760 }
761 if (mRequest == NULL) {
762 ALOGV("Configure: Request queue empty, going inactive");
763 // No requests available, go into inactive mode
764 Mutex::Autolock lock(mInputMutex);
765 mActive = false;
766 return true;
767 } else {
768 Mutex::Autolock lock(mInputMutex);
769 mRequestCount++;
770 }
771
772 camera_metadata_entry_t type;
773 res = find_camera_metadata_entry(mRequest,
774 ANDROID_REQUEST_TYPE,
775 &type);
776 if (res != NO_ERROR) {
777 ALOGE("%s: error reading request type", __FUNCTION__);
778 mParent->signalError();
779 return false;
780 }
781 bool success = false;;
782 switch (type.data.u8[0]) {
783 case ANDROID_REQUEST_TYPE_CAPTURE:
784 success = setupCapture();
785 break;
786 case ANDROID_REQUEST_TYPE_REPROCESS:
787 success = setupReprocess();
788 break;
789 default:
790 ALOGE("%s: Unexpected request type %d",
791 __FUNCTION__, type.data.u8[0]);
792 mParent->signalError();
793 break;
794 }
795 if (!success) return false;
796
797 }
798
799 if (mWaitingForReadout) {
800 bool readoutDone;
801 readoutDone = mParent->mReadoutThread->waitForReady(kWaitPerLoop);
802 if (!readoutDone) return true;
803
804 if (mNextNeedsJpeg) {
805 ALOGV("Configure: Waiting for JPEG compressor");
806 } else {
807 ALOGV("Configure: Waiting for sensor");
808 }
809 mWaitingForReadout = false;
810 }
811
812 if (mNextNeedsJpeg) {
813 bool jpegDone;
814 jpegDone = mParent->mJpegCompressor->waitForDone(kWaitPerLoop);
815 if (!jpegDone) return true;
816
817 ALOGV("Configure: Waiting for sensor");
818 mNextNeedsJpeg = false;
819 }
820
821 if (mNextIsCapture) {
822 return configureNextCapture();
823 } else {
824 return configureNextReprocess();
825 }
826 }
827
setupCapture()828 bool EmulatedFakeCamera2::ConfigureThread::setupCapture() {
829 status_t res;
830
831 mNextIsCapture = true;
832 // Get necessary parameters for sensor config
833 mParent->mControlThread->processRequest(mRequest);
834
835 camera_metadata_entry_t streams;
836 res = find_camera_metadata_entry(mRequest,
837 ANDROID_REQUEST_OUTPUT_STREAMS,
838 &streams);
839 if (res != NO_ERROR) {
840 ALOGE("%s: error reading output stream tag", __FUNCTION__);
841 mParent->signalError();
842 return false;
843 }
844
845 mNextBuffers = new Buffers;
846 mNextNeedsJpeg = false;
847 ALOGV("Configure: Setting up buffers for capture");
848 for (size_t i = 0; i < streams.count; i++) {
849 int streamId = streams.data.i32[i];
850 const Stream &s = mParent->getStreamInfo(streamId);
851 if (s.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
852 ALOGE("%s: Stream %d does not have a concrete pixel format, but "
853 "is included in a request!", __FUNCTION__, streamId);
854 mParent->signalError();
855 return false;
856 }
857 StreamBuffer b;
858 b.streamId = streams.data.u8[i];
859 b.width = s.width;
860 b.height = s.height;
861 b.format = s.format;
862 b.stride = s.stride;
863 mNextBuffers->push_back(b);
864 ALOGV("Configure: Buffer %zu: Stream %d, %d x %d, format 0x%x, "
865 "stride %d",
866 i, b.streamId, b.width, b.height, b.format, b.stride);
867 if (b.format == HAL_PIXEL_FORMAT_BLOB) {
868 mNextNeedsJpeg = true;
869 }
870 }
871
872 camera_metadata_entry_t e;
873 res = find_camera_metadata_entry(mRequest,
874 ANDROID_REQUEST_FRAME_COUNT,
875 &e);
876 if (res != NO_ERROR) {
877 ALOGE("%s: error reading frame count tag: %s (%d)",
878 __FUNCTION__, strerror(-res), res);
879 mParent->signalError();
880 return false;
881 }
882 mNextFrameNumber = *e.data.i32;
883
884 res = find_camera_metadata_entry(mRequest,
885 ANDROID_SENSOR_EXPOSURE_TIME,
886 &e);
887 if (res != NO_ERROR) {
888 ALOGE("%s: error reading exposure time tag: %s (%d)",
889 __FUNCTION__, strerror(-res), res);
890 mParent->signalError();
891 return false;
892 }
893 mNextExposureTime = *e.data.i64;
894
895 res = find_camera_metadata_entry(mRequest,
896 ANDROID_SENSOR_FRAME_DURATION,
897 &e);
898 if (res != NO_ERROR) {
899 ALOGE("%s: error reading frame duration tag", __FUNCTION__);
900 mParent->signalError();
901 return false;
902 }
903 mNextFrameDuration = *e.data.i64;
904
905 if (mNextFrameDuration <
906 mNextExposureTime + Sensor::kMinVerticalBlank) {
907 mNextFrameDuration = mNextExposureTime + Sensor::kMinVerticalBlank;
908 }
909 res = find_camera_metadata_entry(mRequest,
910 ANDROID_SENSOR_SENSITIVITY,
911 &e);
912 if (res != NO_ERROR) {
913 ALOGE("%s: error reading sensitivity tag", __FUNCTION__);
914 mParent->signalError();
915 return false;
916 }
917 mNextSensitivity = *e.data.i32;
918
919 // Start waiting on readout thread
920 mWaitingForReadout = true;
921 ALOGV("Configure: Waiting for readout thread");
922
923 return true;
924 }
925
configureNextCapture()926 bool EmulatedFakeCamera2::ConfigureThread::configureNextCapture() {
927 bool vsync = mParent->mSensor->waitForVSync(kWaitPerLoop);
928 if (!vsync) return true;
929
930 Mutex::Autolock il(mInternalsMutex);
931 ALOGV("Configure: Configuring sensor for capture %d", mNextFrameNumber);
932 mParent->mSensor->setExposureTime(mNextExposureTime);
933 mParent->mSensor->setFrameDuration(mNextFrameDuration);
934 mParent->mSensor->setSensitivity(mNextSensitivity);
935
936 getBuffers();
937
938 ALOGV("Configure: Done configure for capture %d", mNextFrameNumber);
939 mParent->mReadoutThread->setNextOperation(true, mRequest, mNextBuffers);
940 mParent->mSensor->setDestinationBuffers(mNextBuffers);
941
942 mRequest = NULL;
943 mNextBuffers = NULL;
944
945 Mutex::Autolock lock(mInputMutex);
946 mRequestCount--;
947
948 return true;
949 }
950
setupReprocess()951 bool EmulatedFakeCamera2::ConfigureThread::setupReprocess() {
952 status_t res;
953
954 mNextNeedsJpeg = true;
955 mNextIsCapture = false;
956
957 camera_metadata_entry_t reprocessStreams;
958 res = find_camera_metadata_entry(mRequest,
959 ANDROID_REQUEST_INPUT_STREAMS,
960 &reprocessStreams);
961 if (res != NO_ERROR) {
962 ALOGE("%s: error reading output stream tag", __FUNCTION__);
963 mParent->signalError();
964 return false;
965 }
966
967 mNextBuffers = new Buffers;
968
969 ALOGV("Configure: Setting up input buffers for reprocess");
970 for (size_t i = 0; i < reprocessStreams.count; i++) {
971 int streamId = reprocessStreams.data.i32[i];
972 const ReprocessStream &s = mParent->getReprocessStreamInfo(streamId);
973 if (s.format != HAL_PIXEL_FORMAT_RGB_888) {
974 ALOGE("%s: Only ZSL reprocessing supported!",
975 __FUNCTION__);
976 mParent->signalError();
977 return false;
978 }
979 StreamBuffer b;
980 b.streamId = -streamId;
981 b.width = s.width;
982 b.height = s.height;
983 b.format = s.format;
984 b.stride = s.stride;
985 mNextBuffers->push_back(b);
986 }
987
988 camera_metadata_entry_t streams;
989 res = find_camera_metadata_entry(mRequest,
990 ANDROID_REQUEST_OUTPUT_STREAMS,
991 &streams);
992 if (res != NO_ERROR) {
993 ALOGE("%s: error reading output stream tag", __FUNCTION__);
994 mParent->signalError();
995 return false;
996 }
997
998 ALOGV("Configure: Setting up output buffers for reprocess");
999 for (size_t i = 0; i < streams.count; i++) {
1000 int streamId = streams.data.i32[i];
1001 const Stream &s = mParent->getStreamInfo(streamId);
1002 if (s.format != HAL_PIXEL_FORMAT_BLOB) {
1003 // TODO: Support reprocess to YUV
1004 ALOGE("%s: Non-JPEG output stream %d for reprocess not supported",
1005 __FUNCTION__, streamId);
1006 mParent->signalError();
1007 return false;
1008 }
1009 StreamBuffer b;
1010 b.streamId = streams.data.u8[i];
1011 b.width = s.width;
1012 b.height = s.height;
1013 b.format = s.format;
1014 b.stride = s.stride;
1015 mNextBuffers->push_back(b);
1016 ALOGV("Configure: Buffer %zu: Stream %d, %d x %d, format 0x%x, "
1017 "stride %d",
1018 i, b.streamId, b.width, b.height, b.format, b.stride);
1019 }
1020
1021 camera_metadata_entry_t e;
1022 res = find_camera_metadata_entry(mRequest,
1023 ANDROID_REQUEST_FRAME_COUNT,
1024 &e);
1025 if (res != NO_ERROR) {
1026 ALOGE("%s: error reading frame count tag: %s (%d)",
1027 __FUNCTION__, strerror(-res), res);
1028 mParent->signalError();
1029 return false;
1030 }
1031 mNextFrameNumber = *e.data.i32;
1032
1033 return true;
1034 }
1035
configureNextReprocess()1036 bool EmulatedFakeCamera2::ConfigureThread::configureNextReprocess() {
1037 Mutex::Autolock il(mInternalsMutex);
1038
1039 getBuffers();
1040
1041 ALOGV("Configure: Done configure for reprocess %d", mNextFrameNumber);
1042 mParent->mReadoutThread->setNextOperation(false, mRequest, mNextBuffers);
1043
1044 mRequest = NULL;
1045 mNextBuffers = NULL;
1046
1047 Mutex::Autolock lock(mInputMutex);
1048 mRequestCount--;
1049
1050 return true;
1051 }
1052
/*
 * Acquires and maps every buffer in mNextBuffers for the coming frame.
 * Positive streamId: dequeue a write buffer from the output stream and
 * lock it for CPU write. Negative streamId: acquire a filled buffer from
 * the corresponding reprocess stream and lock it for CPU read. On any
 * failure the buffer is returned to its stream, signalError() is raised,
 * and false is returned.
 */
bool EmulatedFakeCamera2::ConfigureThread::getBuffers() {
    status_t res;
    /** Get buffers to fill for this frame */
    for (size_t i = 0; i < mNextBuffers->size(); i++) {
        StreamBuffer &b = mNextBuffers->editItemAt(i);

        if (b.streamId > 0) {
            Stream s = mParent->getStreamInfo(b.streamId);
            ALOGV("Configure: Dequeing buffer from stream %d", b.streamId);
            res = s.ops->dequeue_buffer(s.ops, &(b.buffer) );
            if (res != NO_ERROR || b.buffer == NULL) {
                ALOGE("%s: Unable to dequeue buffer from stream %d: %s (%d)",
                        __FUNCTION__, b.streamId, strerror(-res), res);
                mParent->signalError();
                return false;
            }

            /* Lock the buffer from the perspective of the graphics mapper */
            res = GrallocModule::getInstance().lock(*(b.buffer),
                    GRALLOC_USAGE_HW_CAMERA_WRITE,
                    0, 0, s.width, s.height,
                    (void**)&(b.img));


            if (res != NO_ERROR) {
                ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
                        __FUNCTION__, strerror(-res), res);
                // Give the un-lockable buffer back to the stream.
                s.ops->cancel_buffer(s.ops,
                        b.buffer);
                mParent->signalError();
                return false;
            }
        } else {
            // Reprocess input: negative id encodes the reprocess stream.
            ReprocessStream s = mParent->getReprocessStreamInfo(-b.streamId);
            ALOGV("Configure: Acquiring buffer from reprocess stream %d",
                    -b.streamId);
            res = s.ops->acquire_buffer(s.ops, &(b.buffer) );
            if (res != NO_ERROR || b.buffer == NULL) {
                ALOGE("%s: Unable to acquire buffer from reprocess stream %d: "
                        "%s (%d)", __FUNCTION__, -b.streamId,
                        strerror(-res), res);
                mParent->signalError();
                return false;
            }

            /* Lock the buffer from the perspective of the graphics mapper */
            res = GrallocModule::getInstance().lock(*(b.buffer),
                    GRALLOC_USAGE_HW_CAMERA_READ,
                    0, 0, s.width, s.height,
                    (void**)&(b.img) );
            if (res != NO_ERROR) {
                ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
                        __FUNCTION__, strerror(-res), res);
                // Give the un-lockable buffer back to the stream.
                s.ops->release_buffer(s.ops,
                        b.buffer);
                mParent->signalError();
                return false;
            }
        }
    }
    return true;
}
1115
/*
 * Constructs the readout thread in the inactive state, with an empty
 * fixed-size in-flight queue (ring buffer indexed by head/tail).
 */
EmulatedFakeCamera2::ReadoutThread::ReadoutThread(EmulatedFakeCamera2 *parent):
        Thread(false),
        mParent(parent),
        mRunning(false),
        mActive(false),
        mRequestCount(0),
        mRequest(NULL),
        mBuffers(NULL) {
    mInFlightQueue = new InFlightQueue[kInFlightQueueSize];
    // head == tail means the ring buffer is empty
    mInFlightHead = 0;
    mInFlightTail = 0;
}
1128
// Releases the in-flight ring buffer allocated in the constructor
EmulatedFakeCamera2::ReadoutThread::~ReadoutThread() {
    delete[] mInFlightQueue;
}
1132
/*
 * Thread startup hook: marks the thread as running and wakes any caller
 * blocked in waitUntilRunning().
 */
status_t EmulatedFakeCamera2::ReadoutThread::readyToRun() {
    Mutex::Autolock lock(mInputMutex);
    ALOGV("Starting up ReadoutThread");
    mRunning = true;
    mInputSignal.signal();
    return NO_ERROR;
}
1140
waitUntilRunning()1141 status_t EmulatedFakeCamera2::ReadoutThread::waitUntilRunning() {
1142 Mutex::Autolock lock(mInputMutex);
1143 if (!mRunning) {
1144 ALOGV("Waiting for readout thread to start");
1145 mInputSignal.wait(mInputMutex);
1146 }
1147 return OK;
1148 }
1149
waitForReady(nsecs_t timeout)1150 bool EmulatedFakeCamera2::ReadoutThread::waitForReady(nsecs_t timeout) {
1151 status_t res;
1152 Mutex::Autolock lock(mInputMutex);
1153 while (!readyForNextCapture()) {
1154 res = mReadySignal.waitRelative(mInputMutex, timeout);
1155 if (res == TIMED_OUT) return false;
1156 if (res != OK) {
1157 ALOGE("%s: Error waiting for ready: %s (%d)", __FUNCTION__,
1158 strerror(-res), res);
1159 return false;
1160 }
1161 }
1162 return true;
1163 }
1164
// True while the in-flight ring buffer has a free slot; one slot is always
// left unused so that head == tail unambiguously means "empty".
bool EmulatedFakeCamera2::ReadoutThread::readyForNextCapture() {
    return (mInFlightTail + 1) % kInFlightQueueSize != mInFlightHead;
}
1168
setNextOperation(bool isCapture,camera_metadata_t * request,Buffers * buffers)1169 void EmulatedFakeCamera2::ReadoutThread::setNextOperation(
1170 bool isCapture,
1171 camera_metadata_t *request,
1172 Buffers *buffers) {
1173 Mutex::Autolock lock(mInputMutex);
1174 if ( !readyForNextCapture() ) {
1175 ALOGE("In flight queue full, dropping captures");
1176 mParent->signalError();
1177 return;
1178 }
1179 mInFlightQueue[mInFlightTail].isCapture = isCapture;
1180 mInFlightQueue[mInFlightTail].request = request;
1181 mInFlightQueue[mInFlightTail].buffers = buffers;
1182 mInFlightTail = (mInFlightTail + 1) % kInFlightQueueSize;
1183 mRequestCount++;
1184
1185 if (!mActive) {
1186 mActive = true;
1187 mInputSignal.signal();
1188 }
1189 }
1190
isStreamInUse(uint32_t id)1191 bool EmulatedFakeCamera2::ReadoutThread::isStreamInUse(uint32_t id) {
1192 // acquire in same order as threadLoop
1193 Mutex::Autolock iLock(mInternalsMutex);
1194 Mutex::Autolock lock(mInputMutex);
1195
1196 size_t i = mInFlightHead;
1197 while (i != mInFlightTail) {
1198 for (size_t j = 0; j < mInFlightQueue[i].buffers->size(); j++) {
1199 if ( (*(mInFlightQueue[i].buffers))[j].streamId == (int)id )
1200 return true;
1201 }
1202 i = (i + 1) % kInFlightQueueSize;
1203 }
1204
1205
1206 if (mBuffers != NULL) {
1207 for (i = 0; i < mBuffers->size(); i++) {
1208 if ( (*mBuffers)[i].streamId == (int)id) return true;
1209 }
1210 }
1211
1212 return false;
1213 }
1214
// Number of operations queued or being read out (incremented in
// setNextOperation, decremented at the end of threadLoop)
int EmulatedFakeCamera2::ReadoutThread::getInProgressCount() {
    Mutex::Autolock lock(mInputMutex);

    return mRequestCount;
}
1220
/*
 * Main loop of the readout thread. Each iteration either waits for work,
 * dequeues the next in-flight operation, or completes the current one:
 * waits for sensor data (captures only), builds and enqueues the output
 * metadata frame if requested, frees the request, and pushes each filled
 * buffer to its output stream (JPEG buffers are handed to the compressor
 * instead). Returning true keeps the thread running; false stops it after
 * a fatal error has been signaled.
 */
bool EmulatedFakeCamera2::ReadoutThread::threadLoop() {
    static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
    status_t res;
    int32_t frameNumber;

    // Check if we're currently processing or just waiting
    {
        Mutex::Autolock lock(mInputMutex);
        if (!mActive) {
            // Inactive, keep waiting until we've been signaled
            res = mInputSignal.waitRelative(mInputMutex, kWaitPerLoop);
            if (res != NO_ERROR && res != TIMED_OUT) {
                ALOGE("%s: Error waiting for capture requests: %d",
                        __FUNCTION__, res);
                mParent->signalError();
                return false;
            }
            if (!mActive) return true;
        }
        // Active, see if we need a new request
        if (mRequest == NULL) {
            if (mInFlightHead == mInFlightTail) {
                // Go inactive
                ALOGV("Waiting for sensor data");
                mActive = false;
                return true;
            } else {
                // Pop the head entry off the in-flight ring buffer, taking
                // ownership of its request and buffer list, and wake anyone
                // blocked in waitForReady() now that a slot has freed up
                Mutex::Autolock iLock(mInternalsMutex);
                mReadySignal.signal();
                mIsCapture = mInFlightQueue[mInFlightHead].isCapture;
                mRequest = mInFlightQueue[mInFlightHead].request;
                mBuffers = mInFlightQueue[mInFlightHead].buffers;
                mInFlightQueue[mInFlightHead].request = NULL;
                mInFlightQueue[mInFlightHead].buffers = NULL;
                mInFlightHead = (mInFlightHead + 1) % kInFlightQueueSize;
                ALOGV("Ready to read out request %p, %zu buffers",
                        mRequest, mBuffers->size());
            }
        }
    }

    // Active with request, wait on sensor to complete

    nsecs_t captureTime;

    if (mIsCapture) {
        // Try again next iteration if the sensor hasn't produced a frame yet
        bool gotFrame;
        gotFrame = mParent->mSensor->waitForNewFrame(kWaitPerLoop,
                &captureTime);

        if (!gotFrame) return true;
    }

    Mutex::Autolock iLock(mInternalsMutex);

    camera_metadata_entry_t entry;
    if (!mIsCapture) {
        // Reprocessing: the capture timestamp comes from the original
        // request's metadata rather than from the sensor
        res = find_camera_metadata_entry(mRequest,
                ANDROID_SENSOR_TIMESTAMP,
                &entry);
        if (res != NO_ERROR) {
            ALOGE("%s: error reading reprocessing timestamp: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            mParent->signalError();
            return false;
        }
        captureTime = entry.data.i64[0];
    }

    res = find_camera_metadata_entry(mRequest,
            ANDROID_REQUEST_FRAME_COUNT,
            &entry);
    if (res != NO_ERROR) {
        ALOGE("%s: error reading frame count tag: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        mParent->signalError();
        return false;
    }
    frameNumber = *entry.data.i32;

    res = find_camera_metadata_entry(mRequest,
            ANDROID_REQUEST_METADATA_MODE,
            &entry);
    if (res != NO_ERROR) {
        ALOGE("%s: error reading metadata mode tag: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        mParent->signalError();
        return false;
    }

    // Got sensor data and request, construct frame and send it out
    ALOGV("Readout: Constructing metadata and frames for request %d",
            frameNumber);

    if (*entry.data.u8 == ANDROID_REQUEST_METADATA_MODE_FULL) {
        ALOGV("Readout: Metadata requested, constructing");

        camera_metadata_t *frame = NULL;

        // Size the output frame to hold the request's metadata plus room
        // for the entries added below
        size_t frame_entries = get_camera_metadata_entry_count(mRequest);
        size_t frame_data    = get_camera_metadata_data_count(mRequest);

        // TODO: Dynamically calculate based on enabled statistics, etc
        frame_entries += 10;
        frame_data += 100;

        res = mParent->mFrameQueueDst->dequeue_frame(mParent->mFrameQueueDst,
                frame_entries, frame_data, &frame);

        if (res != NO_ERROR || frame == NULL) {
            ALOGE("%s: Unable to dequeue frame metadata buffer", __FUNCTION__);
            mParent->signalError();
            return false;
        }

        res = append_camera_metadata(frame, mRequest);
        if (res != NO_ERROR) {
            ALOGE("Unable to append request metadata");
        }

        if (mIsCapture) {
            add_camera_metadata_entry(frame,
                    ANDROID_SENSOR_TIMESTAMP,
                    &captureTime,
                    1);

            collectStatisticsMetadata(frame);
            // TODO: Collect all final values used from sensor in addition to timestamp
        }

        ALOGV("Readout: Enqueue frame %d", frameNumber);
        mParent->mFrameQueueDst->enqueue_frame(mParent->mFrameQueueDst,
                frame);
    }
    ALOGV("Readout: Free request");
    res = mParent->mRequestQueueSrc->free_request(mParent->mRequestQueueSrc, mRequest);
    if (res != NO_ERROR) {
        ALOGE("%s: Unable to return request buffer to queue: %d",
                __FUNCTION__, res);
        mParent->signalError();
        return false;
    }
    mRequest = NULL;

    int compressedBufferIndex = -1;
    ALOGV("Readout: Processing %zu buffers", mBuffers->size());
    for (size_t i = 0; i < mBuffers->size(); i++) {
        const StreamBuffer &b = (*mBuffers)[i];
        ALOGV("Readout: Buffer %zu: Stream %d, %d x %d, format 0x%x, stride %d",
                i, b.streamId, b.width, b.height, b.format, b.stride);
        if (b.streamId > 0) {
            if (b.format == HAL_PIXEL_FORMAT_BLOB) {
                // Assumes only one BLOB buffer type per capture
                compressedBufferIndex = i;
            } else {
                // Non-JPEG output: unlock and push the buffer straight out
                ALOGV("Readout: Sending image buffer %zu (%p) to output stream %d",
                        i, (void*)*(b.buffer), b.streamId);
                GrallocModule::getInstance().unlock(*(b.buffer));
                const Stream &s = mParent->getStreamInfo(b.streamId);
                res = s.ops->enqueue_buffer(s.ops, captureTime, b.buffer);
                if (res != OK) {
                    ALOGE("Error enqueuing image buffer %p: %s (%d)", b.buffer,
                            strerror(-res), res);
                    mParent->signalError();
                }
            }
        }
    }

    if (compressedBufferIndex == -1) {
        delete mBuffers;
    } else {
        // JPEG requested: the compressor fills the BLOB buffer asynchronously
        // and calls back onJpegDone()/onJpegInputDone() when finished
        ALOGV("Readout: Starting JPEG compression for buffer %d, stream %d",
                compressedBufferIndex,
                (*mBuffers)[compressedBufferIndex].streamId);
        mJpegTimestamp = captureTime;
        // Takes ownership of mBuffers
        mParent->mJpegCompressor->start(mBuffers, this, nullptr);
    }
    mBuffers = NULL;

    Mutex::Autolock l(mInputMutex);
    mRequestCount--;
    ALOGV("Readout: Done with request %d", frameNumber);
    return true;
}
1407
onJpegDone(const StreamBuffer & jpegBuffer,bool success)1408 void EmulatedFakeCamera2::ReadoutThread::onJpegDone(
1409 const StreamBuffer &jpegBuffer, bool success) {
1410 status_t res;
1411 if (!success) {
1412 ALOGE("%s: Error queueing compressed image buffer %p",
1413 __FUNCTION__, jpegBuffer.buffer);
1414 mParent->signalError();
1415 return;
1416 }
1417
1418 // Write to JPEG output stream
1419 ALOGV("%s: Compression complete, pushing to stream %d", __FUNCTION__,
1420 jpegBuffer.streamId);
1421
1422 GrallocModule::getInstance().unlock(*(jpegBuffer.buffer));
1423 const Stream &s = mParent->getStreamInfo(jpegBuffer.streamId);
1424 res = s.ops->enqueue_buffer(s.ops, mJpegTimestamp, jpegBuffer.buffer);
1425 }
1426
/*
 * JPEG compressor callback for the reprocess input buffer: once the
 * compressor is done reading it, unlock it and return it to its reprocess
 * stream (stream IDs for reprocess streams are stored negated).
 */
void EmulatedFakeCamera2::ReadoutThread::onJpegInputDone(
        const StreamBuffer &inputBuffer) {
    status_t res;
    GrallocModule::getInstance().unlock(*(inputBuffer.buffer));
    const ReprocessStream &s =
            mParent->getReprocessStreamInfo(-inputBuffer.streamId);
    res = s.ops->release_buffer(s.ops, inputBuffer.buffer);
    if (res != OK) {
        ALOGE("Error releasing reprocess buffer %p: %s (%d)",
                inputBuffer.buffer, strerror(-res), res);
        mParent->signalError();
    }
}
1440
collectStatisticsMetadata(camera_metadata_t * frame)1441 status_t EmulatedFakeCamera2::ReadoutThread::collectStatisticsMetadata(
1442 camera_metadata_t *frame) {
1443 // Completely fake face rectangles, don't correspond to real faces in scene
1444 ALOGV("Readout: Collecting statistics metadata");
1445
1446 status_t res;
1447 camera_metadata_entry_t entry;
1448 res = find_camera_metadata_entry(frame,
1449 ANDROID_STATISTICS_FACE_DETECT_MODE,
1450 &entry);
1451 if (res != OK) {
1452 ALOGE("%s: Unable to find face detect mode!", __FUNCTION__);
1453 return BAD_VALUE;
1454 }
1455
1456 if (entry.data.u8[0] == ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) return OK;
1457
1458 // The coordinate system for the face regions is the raw sensor pixel
1459 // coordinates. Here, we map from the scene coordinates (0-19 in both axis)
1460 // to raw pixels, for the scene defined in fake-pipeline2/Scene.cpp. We
1461 // approximately place two faces on top of the windows of the house. No
1462 // actual faces exist there, but might one day. Note that this doesn't
1463 // account for the offsets used to account for aspect ratio differences, so
1464 // the rectangles don't line up quite right.
1465 const size_t numFaces = 2;
1466 int32_t rects[numFaces * 4] = {
1467 static_cast<int32_t>(mParent->mSensorWidth * 10 / 20),
1468 static_cast<int32_t>(mParent->mSensorHeight * 15 / 20),
1469 static_cast<int32_t>(mParent->mSensorWidth * 12 / 20),
1470 static_cast<int32_t>(mParent->mSensorHeight * 17 / 20),
1471
1472 static_cast<int32_t>(mParent->mSensorWidth * 16 / 20),
1473 static_cast<int32_t>(mParent->mSensorHeight * 15 / 20),
1474 static_cast<int32_t>(mParent->mSensorWidth * 18 / 20),
1475 static_cast<int32_t>(mParent->mSensorHeight * 17 / 20)
1476 };
1477 // To simulate some kind of real detection going on, we jitter the rectangles on
1478 // each frame by a few pixels in each dimension.
1479 for (size_t i = 0; i < numFaces * 4; i++) {
1480 rects[i] += (int32_t)(((float)rand() / RAND_MAX) * 6 - 3);
1481 }
1482 // The confidence scores (0-100) are similarly jittered.
1483 uint8_t scores[numFaces] = { 85, 95 };
1484 for (size_t i = 0; i < numFaces; i++) {
1485 scores[i] += (int32_t)(((float)rand() / RAND_MAX) * 10 - 5);
1486 }
1487
1488 res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_RECTANGLES,
1489 rects, numFaces * 4);
1490 if (res != OK) {
1491 ALOGE("%s: Unable to add face rectangles!", __FUNCTION__);
1492 return BAD_VALUE;
1493 }
1494
1495 res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_SCORES,
1496 scores, numFaces);
1497 if (res != OK) {
1498 ALOGE("%s: Unable to add face scores!", __FUNCTION__);
1499 return BAD_VALUE;
1500 }
1501
1502 if (entry.data.u8[0] == ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE) return OK;
1503
1504 // Advanced face detection options - add eye/mouth coordinates. The
1505 // coordinates in order are (leftEyeX, leftEyeY, rightEyeX, rightEyeY,
1506 // mouthX, mouthY). The mapping is the same as the face rectangles.
1507 int32_t features[numFaces * 6] = {
1508 static_cast<int32_t>(mParent->mSensorWidth * 10.5 / 20),
1509 static_cast<int32_t>(mParent->mSensorHeight * 16 / 20),
1510 static_cast<int32_t>(mParent->mSensorWidth * 11.5 / 20),
1511 static_cast<int32_t>(mParent->mSensorHeight * 16 / 20),
1512 static_cast<int32_t>(mParent->mSensorWidth * 11 / 20),
1513 static_cast<int32_t>(mParent->mSensorHeight * 16.5 / 20),
1514
1515 static_cast<int32_t>(mParent->mSensorWidth * 16.5 / 20),
1516 static_cast<int32_t>(mParent->mSensorHeight * 16 / 20),
1517 static_cast<int32_t>(mParent->mSensorWidth * 17.5 / 20),
1518 static_cast<int32_t>(mParent->mSensorHeight * 16 / 20),
1519 static_cast<int32_t>(mParent->mSensorWidth * 17 / 20),
1520 static_cast<int32_t>(mParent->mSensorHeight * 16.5 / 20),
1521 };
1522 // Jitter these a bit less than the rects
1523 for (size_t i = 0; i < numFaces * 6; i++) {
1524 features[i] += (int32_t)(((float)rand() / RAND_MAX) * 4 - 2);
1525 }
1526 // These are unique IDs that are used to identify each face while it's
1527 // visible to the detector (if a face went away and came back, it'd get a
1528 // new ID).
1529 int32_t ids[numFaces] = {
1530 100, 200
1531 };
1532
1533 res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_LANDMARKS,
1534 features, numFaces * 6);
1535 if (res != OK) {
1536 ALOGE("%s: Unable to add face landmarks!", __FUNCTION__);
1537 return BAD_VALUE;
1538 }
1539
1540 res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_IDS,
1541 ids, numFaces);
1542 if (res != OK) {
1543 ALOGE("%s: Unable to add face scores!", __FUNCTION__);
1544 return BAD_VALUE;
1545 }
1546
1547 return OK;
1548 }
1549
/*
 * Constructs the 3A control thread in the not-running state; the 3A state
 * itself is initialized in readyToRun().
 */
EmulatedFakeCamera2::ControlThread::ControlThread(EmulatedFakeCamera2 *parent):
        Thread(false),
        mParent(parent) {
    mRunning = false;
}
1555
// No resources to release; thread teardown is handled by the Thread base class
EmulatedFakeCamera2::ControlThread::~ControlThread() {
}
1558
/*
 * Thread startup hook: resets all emulated 3A state (modes, triggers,
 * trigger IDs, AF/AE/AWB state machines, exposure), marks the thread as
 * running, and wakes any caller blocked in waitUntilRunning().
 */
status_t EmulatedFakeCamera2::ControlThread::readyToRun() {
    Mutex::Autolock lock(mInputMutex);

    ALOGV("Starting up ControlThread");
    mRunning = true;
    // No 3A triggers pending at startup
    mStartAf = false;
    mCancelAf = false;
    mStartPrecapture = false;

    mControlMode = ANDROID_CONTROL_MODE_AUTO;

    mEffectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
    mSceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;

    mAfMode = ANDROID_CONTROL_AF_MODE_AUTO;
    mAfModeChange = false;

    mAeMode = ANDROID_CONTROL_AE_MODE_ON;
    mAwbMode = ANDROID_CONTROL_AWB_MODE_AUTO;

    mAfTriggerId = 0;
    mPrecaptureTriggerId = 0;

    // All 3A state machines start idle
    mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
    mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
    mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;

    mExposureTime = kNormalExposureTime;

    mInputSignal.signal();
    return NO_ERROR;
}
1591
waitUntilRunning()1592 status_t EmulatedFakeCamera2::ControlThread::waitUntilRunning() {
1593 Mutex::Autolock lock(mInputMutex);
1594 if (!mRunning) {
1595 ALOGV("Waiting for control thread to start");
1596 mInputSignal.wait(mInputMutex);
1597 }
1598 return OK;
1599 }
1600
1601 // Override android.control.* fields with 3A values before sending request to sensor
processRequest(camera_metadata_t * request)1602 status_t EmulatedFakeCamera2::ControlThread::processRequest(camera_metadata_t *request) {
1603 Mutex::Autolock lock(mInputMutex);
1604 // TODO: Add handling for all android.control.* fields here
1605 camera_metadata_entry_t mode;
1606 status_t res;
1607
1608 #define READ_IF_OK(res, what, def) \
1609 (((res) == OK) ? (what) : (uint8_t)(def))
1610
1611 res = find_camera_metadata_entry(request,
1612 ANDROID_CONTROL_MODE,
1613 &mode);
1614 mControlMode = READ_IF_OK(res, mode.data.u8[0], ANDROID_CONTROL_MODE_OFF);
1615
1616 // disable all 3A
1617 if (mControlMode == ANDROID_CONTROL_MODE_OFF) {
1618 mEffectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
1619 mSceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED;
1620 mAfMode = ANDROID_CONTROL_AF_MODE_OFF;
1621 mAeLock = ANDROID_CONTROL_AE_LOCK_ON;
1622 mAeMode = ANDROID_CONTROL_AE_MODE_OFF;
1623 mAfModeChange = true;
1624 mStartAf = false;
1625 mCancelAf = true;
1626 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
1627 mAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
1628 return res;
1629 }
1630
1631 res = find_camera_metadata_entry(request,
1632 ANDROID_CONTROL_EFFECT_MODE,
1633 &mode);
1634 mEffectMode = READ_IF_OK(res, mode.data.u8[0],
1635 ANDROID_CONTROL_EFFECT_MODE_OFF);
1636
1637 res = find_camera_metadata_entry(request,
1638 ANDROID_CONTROL_SCENE_MODE,
1639 &mode);
1640 mSceneMode = READ_IF_OK(res, mode.data.u8[0],
1641 ANDROID_CONTROL_SCENE_MODE_DISABLED);
1642
1643 res = find_camera_metadata_entry(request,
1644 ANDROID_CONTROL_AF_MODE,
1645 &mode);
1646 if (mAfMode != mode.data.u8[0]) {
1647 ALOGV("AF new mode: %d, old mode %d", mode.data.u8[0], mAfMode);
1648 mAfMode = mode.data.u8[0];
1649 mAfModeChange = true;
1650 mStartAf = false;
1651 mCancelAf = false;
1652 }
1653
1654 res = find_camera_metadata_entry(request,
1655 ANDROID_CONTROL_AE_MODE,
1656 &mode);
1657 mAeMode = READ_IF_OK(res, mode.data.u8[0],
1658 ANDROID_CONTROL_AE_MODE_OFF);
1659
1660 res = find_camera_metadata_entry(request,
1661 ANDROID_CONTROL_AE_LOCK,
1662 &mode);
1663 uint8_t aeLockVal = READ_IF_OK(res, mode.data.u8[0],
1664 ANDROID_CONTROL_AE_LOCK_ON);
1665 bool aeLock = (aeLockVal == ANDROID_CONTROL_AE_LOCK_ON);
1666 if (mAeLock && !aeLock) {
1667 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
1668 }
1669 mAeLock = aeLock;
1670
1671 res = find_camera_metadata_entry(request,
1672 ANDROID_CONTROL_AWB_MODE,
1673 &mode);
1674 mAwbMode = READ_IF_OK(res, mode.data.u8[0],
1675 ANDROID_CONTROL_AWB_MODE_OFF);
1676
1677 // TODO: Override more control fields
1678
1679 if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
1680 camera_metadata_entry_t exposureTime;
1681 res = find_camera_metadata_entry(request,
1682 ANDROID_SENSOR_EXPOSURE_TIME,
1683 &exposureTime);
1684 if (res == OK) {
1685 exposureTime.data.i64[0] = mExposureTime;
1686 }
1687 }
1688
1689 #undef READ_IF_OK
1690
1691 return OK;
1692 }
1693
triggerAction(uint32_t msgType,int32_t ext1,int32_t ext2)1694 status_t EmulatedFakeCamera2::ControlThread::triggerAction(uint32_t msgType,
1695 int32_t ext1, int32_t ext2) {
1696 ALOGV("%s: Triggering %d (%d, %d)", __FUNCTION__, msgType, ext1, ext2);
1697 Mutex::Autolock lock(mInputMutex);
1698 switch (msgType) {
1699 case CAMERA2_TRIGGER_AUTOFOCUS:
1700 mAfTriggerId = ext1;
1701 mStartAf = true;
1702 mCancelAf = false;
1703 break;
1704 case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
1705 mAfTriggerId = ext1;
1706 mStartAf = false;
1707 mCancelAf = true;
1708 break;
1709 case CAMERA2_TRIGGER_PRECAPTURE_METERING:
1710 mPrecaptureTriggerId = ext1;
1711 mStartPrecapture = true;
1712 break;
1713 default:
1714 ALOGE("%s: Unknown action triggered: %d (arguments %d %d)",
1715 __FUNCTION__, msgType, ext1, ext2);
1716 return BAD_VALUE;
1717 }
1718 return OK;
1719 }
1720
1721 const nsecs_t EmulatedFakeCamera2::ControlThread::kControlCycleDelay = 100 * MSEC;
1722 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAfDuration = 500 * MSEC;
1723 const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAfDuration = 900 * MSEC;
1724 const float EmulatedFakeCamera2::ControlThread::kAfSuccessRate = 0.9;
1725 // Once every 5 seconds
1726 const float EmulatedFakeCamera2::ControlThread::kContinuousAfStartRate =
1727 kControlCycleDelay / 5.0 * SEC;
1728 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAeDuration = 500 * MSEC;
1729 const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAeDuration = 2 * SEC;
1730 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinPrecaptureAeDuration = 100 * MSEC;
1731 const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxPrecaptureAeDuration = 400 * MSEC;
1732 // Once every 3 seconds
1733 const float EmulatedFakeCamera2::ControlThread::kAeScanStartRate =
1734 kControlCycleDelay / 3000000000.0;
1735
1736 const nsecs_t EmulatedFakeCamera2::ControlThread::kNormalExposureTime = 10 * MSEC;
1737 const nsecs_t EmulatedFakeCamera2::ControlThread::kExposureJump = 2 * MSEC;
1738 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinExposureTime = 1 * MSEC;
1739
/*
 * One cycle of the 3A control loop: snapshot trigger flags and 3A state
 * under the input lock, run the AF and AE state machines on the snapshot,
 * publish any state transitions, then sleep for the remainder of the
 * control cycle (the scan updates may shorten the sleep so a scan ends on
 * time). Returning true keeps the thread running.
 */
bool EmulatedFakeCamera2::ControlThread::threadLoop() {
    bool afModeChange = false;
    bool afTriggered = false;
    bool afCancelled = false;
    uint8_t afState;
    uint8_t afMode;
    int32_t afTriggerId;
    bool precaptureTriggered = false;
    uint8_t aeState;
    uint8_t aeMode;
    bool aeLock;
    int32_t precaptureTriggerId;
    nsecs_t nextSleep = kControlCycleDelay;

    // Consume pending triggers and copy the shared 3A state into locals so
    // the state machines below run without holding mInputMutex
    {
        Mutex::Autolock lock(mInputMutex);
        if (mStartAf) {
            ALOGD("Starting AF trigger processing");
            afTriggered = true;
            mStartAf = false;
        } else if (mCancelAf) {
            ALOGD("Starting cancel AF trigger processing");
            afCancelled = true;
            mCancelAf = false;
        }
        afState = mAfState;
        afMode = mAfMode;
        afModeChange = mAfModeChange;
        mAfModeChange = false;

        afTriggerId = mAfTriggerId;

        if(mStartPrecapture) {
            ALOGD("Starting precapture trigger processing");
            precaptureTriggered = true;
            mStartPrecapture = false;
        }
        aeState = mAeState;
        aeMode = mAeMode;
        aeLock = mAeLock;
        precaptureTriggerId = mPrecaptureTriggerId;
    }

    // AF cancel or mode change aborts any scan in progress
    if (afCancelled || afModeChange) {
        ALOGV("Resetting AF state due to cancel/mode change");
        afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
        updateAfState(afState, afTriggerId);
        mAfScanDuration = 0;
        mLockAfterPassiveScan = false;
    }

    if (afTriggered) {
        afState = processAfTrigger(afMode, afState);
    }

    afState = maybeStartAfScan(afMode, afState);
    afState = updateAfScan(afMode, afState, &nextSleep);
    updateAfState(afState, afTriggerId);

    if (precaptureTriggered) {
        aeState = processPrecaptureTrigger(aeMode, aeState);
    }

    aeState = maybeStartAeScan(aeMode, aeLock, aeState);
    aeState = updateAeScan(aeMode, aeLock, aeState, &nextSleep);
    updateAeState(aeState, precaptureTriggerId);

    // Sleep out the rest of the control cycle, retrying if the sleep is
    // interrupted (nanosleep updates t with the remaining time)
    int ret;
    timespec t;
    t.tv_sec = 0;
    t.tv_nsec = nextSleep;
    do {
        ret = nanosleep(&t, &t);
    } while (ret != 0);

    // Account the elapsed cycle time against any scans in progress
    if (mAfScanDuration > 0) {
        mAfScanDuration -= nextSleep;
    }
    if (mAeScanDuration > 0) {
        mAeScanDuration -= nextSleep;
    }

    return true;
}
1824
/*
 * Applies an AF start trigger to the AF state machine and returns the new
 * AF state. AUTO/MACRO start a timed active scan; the continuous modes
 * lock immediately (or, for CONTINUOUS_PICTURE, defer the lock until the
 * current passive scan finishes). OFF/EDOF ignore the trigger.
 */
int EmulatedFakeCamera2::ControlThread::processAfTrigger(uint8_t afMode,
        uint8_t afState) {
    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_OFF:
        case ANDROID_CONTROL_AF_MODE_EDOF:
            // Do nothing
            break;
        case ANDROID_CONTROL_AF_MODE_MACRO:
        case ANDROID_CONTROL_AF_MODE_AUTO:
            switch (afState) {
                case ANDROID_CONTROL_AF_STATE_INACTIVE:
                case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
                case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
                    // Start new focusing cycle with a random duration within
                    // [kMinAfDuration, kMaxAfDuration]
                    mAfScanDuration =  ((double)rand() / RAND_MAX) *
                        (kMaxAfDuration - kMinAfDuration) + kMinAfDuration;
                    afState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                    ALOGV("%s: AF scan start, duration %" PRId64 " ms",
                          __FUNCTION__, mAfScanDuration / 1000000);
                    break;
                case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
                    // Ignore new request, already scanning
                    break;
                default:
                    ALOGE("Unexpected AF state in AUTO/MACRO AF mode: %d",
                          afState);
            }
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            switch (afState) {
                // Picture mode waits for passive scan to complete
                case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
                    mLockAfterPassiveScan = true;
                    break;
                case ANDROID_CONTROL_AF_STATE_INACTIVE:
                    afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                    break;
                case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
                    afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                    break;
                case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
                case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
                    // Must cancel to get out of these states
                    break;
                default:
                    ALOGE("Unexpected AF state in CONTINUOUS_PICTURE AF mode: %d",
                          afState);
            }
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            switch (afState) {
                // Video mode does not wait for passive scan to complete
                case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
                case ANDROID_CONTROL_AF_STATE_INACTIVE:
                    afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                    break;
                case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
                    afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                    break;
                case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
                case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
                    // Must cancel to get out of these states
                    break;
                default:
                    ALOGE("Unexpected AF state in CONTINUOUS_VIDEO AF mode: %d",
                          afState);
            }
            break;
        default:
            break;
    }
    return afState;
}
1898
maybeStartAfScan(uint8_t afMode,uint8_t afState)1899 int EmulatedFakeCamera2::ControlThread::maybeStartAfScan(uint8_t afMode,
1900 uint8_t afState) {
1901 if ((afMode == ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO ||
1902 afMode == ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE) &&
1903 (afState == ANDROID_CONTROL_AF_STATE_INACTIVE ||
1904 afState == ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED)) {
1905
1906 bool startScan = ((double)rand() / RAND_MAX) < kContinuousAfStartRate;
1907 if (startScan) {
1908 // Start new passive focusing cycle
1909 mAfScanDuration = ((double)rand() / RAND_MAX) *
1910 (kMaxAfDuration - kMinAfDuration) + kMinAfDuration;
1911 afState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
1912 ALOGV("%s: AF passive scan start, duration %" PRId64 " ms",
1913 __FUNCTION__, mAfScanDuration / 1000000);
1914 }
1915 }
1916 return afState;
1917 }
1918
/*
 * Advances an in-progress AF scan. When the scan timer has expired, picks
 * the terminal state for the current mode (random success for AUTO/MACRO,
 * passive-focused or locked for the continuous modes); otherwise shortens
 * *maxSleep so the control loop wakes exactly when the scan should end.
 * Returns the (possibly updated) AF state.
 */
int EmulatedFakeCamera2::ControlThread::updateAfScan(uint8_t afMode,
        uint8_t afState, nsecs_t *maxSleep) {
    if (! (afState == ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN ||
            afState == ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN ) ) {
        return afState;
    }

    if (mAfScanDuration <= 0) {
        ALOGV("%s: AF scan done", __FUNCTION__);
        switch (afMode) {
            case ANDROID_CONTROL_AF_MODE_MACRO:
            case ANDROID_CONTROL_AF_MODE_AUTO: {
                // Succeed with probability kAfSuccessRate
                bool success = ((double)rand() / RAND_MAX) < kAfSuccessRate;
                if (success) {
                    afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
                break;
            }
            case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                // An AF trigger received during the passive scan requested a
                // lock as soon as the scan finished (see processAfTrigger)
                if (mLockAfterPassiveScan) {
                    afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                    mLockAfterPassiveScan = false;
                } else {
                    afState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
                }
                break;
            case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                afState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
                break;
            default:
                ALOGE("Unexpected AF mode in scan state");
        }
    } else {
        // Scan still running: wake up no later than its remaining duration
        if (mAfScanDuration <= *maxSleep) {
            *maxSleep = mAfScanDuration;
        }
    }
    return afState;
}
1960
updateAfState(uint8_t newState,int32_t triggerId)1961 void EmulatedFakeCamera2::ControlThread::updateAfState(uint8_t newState,
1962 int32_t triggerId) {
1963 Mutex::Autolock lock(mInputMutex);
1964 if (mAfState != newState) {
1965 ALOGV("%s: Autofocus state now %d, id %d", __FUNCTION__,
1966 newState, triggerId);
1967 mAfState = newState;
1968 mParent->sendNotification(CAMERA2_MSG_AUTOFOCUS,
1969 newState, triggerId, 0);
1970 }
1971 }
1972
/*
 * Applies an AE precapture metering trigger: in any of the AE-on modes,
 * enters the PRECAPTURE state and starts a randomly-timed precapture scan.
 * AE-off ignores the trigger. Returns the (possibly updated) AE state.
 */
int EmulatedFakeCamera2::ControlThread::processPrecaptureTrigger(uint8_t aeMode,
        uint8_t aeState) {
    switch (aeMode) {
        case ANDROID_CONTROL_AE_MODE_OFF:
            // Don't do anything for these
            return aeState;
        case ANDROID_CONTROL_AE_MODE_ON:
        case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:
        case ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH:
        case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE:
            // Trigger a precapture cycle with a random duration within
            // [kMinPrecaptureAeDuration, kMaxPrecaptureAeDuration]
            aeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
            mAeScanDuration = ((double)rand() / RAND_MAX) *
                    (kMaxPrecaptureAeDuration - kMinPrecaptureAeDuration) +
                    kMinPrecaptureAeDuration;
            ALOGD("%s: AE precapture scan start, duration %" PRId64 " ms",
                    __FUNCTION__, mAeScanDuration / 1000000);

    }
    return aeState;
}
1994
/*
 * In the AE-on modes, randomly starts an exposure search from an idle
 * (INACTIVE or CONVERGED) state with probability kAeScanStartRate per
 * control cycle, unless AE is locked. Returns the (possibly updated)
 * AE state.
 */
int EmulatedFakeCamera2::ControlThread::maybeStartAeScan(uint8_t aeMode,
        bool aeLocked,
        uint8_t aeState) {
    if (aeLocked) return aeState;
    switch (aeMode) {
        case ANDROID_CONTROL_AE_MODE_OFF:
            break;
        case ANDROID_CONTROL_AE_MODE_ON:
        case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:
        case ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH:
        case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE: {
            // Only start a scan from an idle state
            if (aeState != ANDROID_CONTROL_AE_STATE_INACTIVE &&
                    aeState != ANDROID_CONTROL_AE_STATE_CONVERGED) break;

            bool startScan = ((double)rand() / RAND_MAX) < kAeScanStartRate;
            if (startScan) {
                // Random scan duration within [kMinAeDuration, kMaxAeDuration]
                mAeScanDuration = ((double)rand() / RAND_MAX) *
                        (kMaxAeDuration - kMinAeDuration) + kMinAeDuration;
                aeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
                ALOGV("%s: AE scan start, duration %" PRId64 " ms",
                        __FUNCTION__, mAeScanDuration / 1000000);
            }
        }
    }

    return aeState;
}
2022
updateAeScan(uint8_t aeMode,bool aeLock,uint8_t aeState,nsecs_t * maxSleep)2023 int EmulatedFakeCamera2::ControlThread::updateAeScan(uint8_t aeMode,
2024 bool aeLock, uint8_t aeState, nsecs_t *maxSleep) {
2025 if (aeLock && aeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2026 mAeScanDuration = 0;
2027 aeState = ANDROID_CONTROL_AE_STATE_LOCKED;
2028 } else if ((aeState == ANDROID_CONTROL_AE_STATE_SEARCHING) ||
2029 (aeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE ) ) {
2030 if (mAeScanDuration <= 0) {
2031 ALOGV("%s: AE scan done", __FUNCTION__);
2032 aeState = aeLock ?
2033 ANDROID_CONTROL_AE_STATE_LOCKED :ANDROID_CONTROL_AE_STATE_CONVERGED;
2034
2035 Mutex::Autolock lock(mInputMutex);
2036 mExposureTime = kNormalExposureTime;
2037 } else {
2038 if (mAeScanDuration <= *maxSleep) {
2039 *maxSleep = mAeScanDuration;
2040 }
2041
2042 int64_t exposureDelta =
2043 ((double)rand() / RAND_MAX) * 2 * kExposureJump -
2044 kExposureJump;
2045 Mutex::Autolock lock(mInputMutex);
2046 mExposureTime = mExposureTime + exposureDelta;
2047 if (mExposureTime < kMinExposureTime) mExposureTime = kMinExposureTime;
2048 }
2049 }
2050
2051 return aeState;
2052 }
2053
2054
updateAeState(uint8_t newState,int32_t triggerId)2055 void EmulatedFakeCamera2::ControlThread::updateAeState(uint8_t newState,
2056 int32_t triggerId) {
2057 Mutex::Autolock lock(mInputMutex);
2058 if (mAeState != newState) {
2059 ALOGV("%s: Autoexposure state now %d, id %d", __FUNCTION__,
2060 newState, triggerId);
2061 mAeState = newState;
2062 mParent->sendNotification(CAMERA2_MSG_AUTOEXPOSURE,
2063 newState, triggerId, 0);
2064 }
2065 }
2066
2067 /** Private methods */
2068
/*
 * Builds (or sizes) the static camera characteristics metadata for this
 * fake camera.  Intended two-pass usage: first call with sizeRequest=true
 * so entryCount/dataCount are tallied and *info is allocated here, then
 * with sizeRequest=false to fill the entries into *info via addOrSize().
 */
status_t EmulatedFakeCamera2::constructStaticInfo(
        camera_metadata_t **info,
        bool sizeRequest) const {

    size_t entryCount = 0;
    size_t dataCount = 0;
    status_t ret;

// Either appends one entry to *info or accumulates its size into
// entryCount/dataCount, depending on sizeRequest; returns early on error.
#define ADD_OR_SIZE( tag, data, count ) \
    if ( ( ret = addOrSize(*info, sizeRequest, &entryCount, &dataCount, \
            tag, data, count) ) != OK ) return ret

    // android.lens

    // Focus distances are expressed as reciprocal meters (1.0/0.05 == 5 cm,
    // 0.0 == infinity).
    // 5 cm min focus distance for back camera, infinity (fixed focus) for front
    const float minFocusDistance = mFacingBack ? 1.0/0.05 : 0.0;
    ADD_OR_SIZE(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
            &minFocusDistance, 1);
    // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
    const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0;
    ADD_OR_SIZE(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
            &hyperFocalDistance, 1);

    static const float focalLength = 3.30f; // mm
    ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
            &focalLength, 1);
    static const float aperture = 2.8f;
    ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
            &aperture, 1);
    static const float filterDensity = 0;
    ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
            &filterDensity, 1);
    // No OIS on the fake lens.
    static const uint8_t availableOpticalStabilization =
            ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
            &availableOpticalStabilization, 1);

    // Degenerate 1x1 shading map (i.e. no spatial shading correction data).
    static const int32_t lensShadingMapSize[] = {1, 1};
    ADD_OR_SIZE(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
            sizeof(lensShadingMapSize)/sizeof(int32_t));

    int32_t lensFacing = mFacingBack ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    ADD_OR_SIZE(ANDROID_LENS_FACING, &lensFacing, 1);

    // android.sensor

    ADD_OR_SIZE(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
            Sensor::kExposureTimeRange, 2);

    ADD_OR_SIZE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
            &Sensor::kFrameDurationRange[1], 1);

    ADD_OR_SIZE(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
            Sensor::kSensitivityRange,
            sizeof(Sensor::kSensitivityRange)
            /sizeof(int32_t));

    ADD_OR_SIZE(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
            &Sensor::kColorFilterArrangement, 1);

    static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
    ADD_OR_SIZE(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
            sensorPhysicalSize, 2);

    // Pixel array and active array are identical for the fake sensor.
    const int32_t pixelArray[] = {mSensorWidth, mSensorHeight};
    ADD_OR_SIZE(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
            pixelArray, 2);

    ADD_OR_SIZE(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
            pixelArray, 2);

    ADD_OR_SIZE(ANDROID_SENSOR_INFO_WHITE_LEVEL,
            &Sensor::kMaxRawValue, 1);

    // Same black level for all four Bayer channels.
    static const int32_t blackLevelPattern[4] = {
        static_cast<int32_t>(Sensor::kBlackLevel),
        static_cast<int32_t>(Sensor::kBlackLevel),
        static_cast<int32_t>(Sensor::kBlackLevel),
        static_cast<int32_t>(Sensor::kBlackLevel)
    };
    ADD_OR_SIZE(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
            blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));

    //TODO: sensor color calibration fields

    // android.flash
    static const uint8_t flashAvailable = 0;
    ADD_OR_SIZE(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);

    static const int64_t flashChargeDuration = 0;
    ADD_OR_SIZE(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1);

    // android.tonemap

    static const int32_t tonemapCurvePoints = 128;
    ADD_OR_SIZE(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);

    // android.scaler

    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_FORMATS,
            kAvailableFormats,
            sizeof(kAvailableFormats)/sizeof(uint32_t));

    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
            kAvailableRawSizes,
            sizeof(kAvailableRawSizes)/sizeof(uint32_t));

    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
            kAvailableRawMinDurations,
            sizeof(kAvailableRawMinDurations)/sizeof(uint64_t));

    // Back and front cameras advertise different processed/JPEG size lists.
    if (mFacingBack) {
        ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                kAvailableProcessedSizesBack,
                sizeof(kAvailableProcessedSizesBack)/sizeof(uint32_t));
    } else {
        ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                kAvailableProcessedSizesFront,
                sizeof(kAvailableProcessedSizesFront)/sizeof(uint32_t));
    }

    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
            kAvailableProcessedMinDurations,
            sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t));

    if (mFacingBack) {
        ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                kAvailableJpegSizesBack,
                sizeof(kAvailableJpegSizesBack)/sizeof(uint32_t));
    } else {
        ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                kAvailableJpegSizesFront,
                sizeof(kAvailableJpegSizesFront)/sizeof(uint32_t));
    }

    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
            kAvailableJpegMinDurations,
            sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t));

    static const float maxZoom = 10;
    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    // android.jpeg

    // Flat (width, height) pairs; (0, 0) means "no thumbnail".
    static const int32_t jpegThumbnailSizes[] = {
            0, 0,
            160, 120,
            320, 240
     };
    ADD_OR_SIZE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
            jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));

    static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
    ADD_OR_SIZE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);

    // android.stats

    static const uint8_t availableFaceDetectModes[] = {
        ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
        ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
        ANDROID_STATISTICS_FACE_DETECT_MODE_FULL
    };

    ADD_OR_SIZE(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
            availableFaceDetectModes,
            sizeof(availableFaceDetectModes));

    static const int32_t maxFaceCount = 8;
    ADD_OR_SIZE(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
            &maxFaceCount, 1);

    static const int32_t histogramSize = 64;
    ADD_OR_SIZE(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
            &histogramSize, 1);

    static const int32_t maxHistogramCount = 1000;
    ADD_OR_SIZE(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
            &maxHistogramCount, 1);

    static const int32_t sharpnessMapSize[2] = {64, 64};
    ADD_OR_SIZE(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
            sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t));

    static const int32_t maxSharpnessMapValue = 1000;
    ADD_OR_SIZE(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            &maxSharpnessMapValue, 1);

    // android.control

    // Only the "disabled" scene mode is advertised here.
    static const uint8_t availableSceneModes[] = {
            ANDROID_CONTROL_SCENE_MODE_DISABLED
    };
    ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
            availableSceneModes, sizeof(availableSceneModes));

    static const uint8_t availableEffects[] = {
            ANDROID_CONTROL_EFFECT_MODE_OFF
    };
    ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_EFFECTS,
            availableEffects, sizeof(availableEffects));

    // No metering-region support for AE/AWB/AF.
    static const int32_t max3aRegions[] = {/*AE*/ 0,/*AWB*/ 0,/*AF*/ 0};
    ADD_OR_SIZE(ANDROID_CONTROL_MAX_REGIONS,
            max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0]));

    static const uint8_t availableAeModes[] = {
            ANDROID_CONTROL_AE_MODE_OFF,
            ANDROID_CONTROL_AE_MODE_ON
    };
    ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_MODES,
            availableAeModes, sizeof(availableAeModes));

    // Exposure compensation in 1/3 EV steps.
    static const camera_metadata_rational exposureCompensationStep = {
            1, 3
    };
    ADD_OR_SIZE(ANDROID_CONTROL_AE_COMPENSATION_STEP,
            &exposureCompensationStep, 1);

    int32_t exposureCompensationRange[] = {-9, 9};
    ADD_OR_SIZE(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    // Flat (min, max) fps pairs: [5,30] and [15,30].
    static const int32_t availableTargetFpsRanges[] = {
            5, 30, 15, 30
    };
    ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            availableTargetFpsRanges,
            sizeof(availableTargetFpsRanges)/sizeof(int32_t));

    static const uint8_t availableAntibandingModes[] = {
            ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
            ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO
    };
    ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
            availableAntibandingModes, sizeof(availableAntibandingModes));

    static const uint8_t availableAwbModes[] = {
            ANDROID_CONTROL_AWB_MODE_OFF,
            ANDROID_CONTROL_AWB_MODE_AUTO,
            ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
            ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
            ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
            ANDROID_CONTROL_AWB_MODE_SHADE
    };
    ADD_OR_SIZE(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
            availableAwbModes, sizeof(availableAwbModes));

    // Back camera supports autofocus; the front camera is fixed-focus.
    static const uint8_t availableAfModesBack[] = {
            ANDROID_CONTROL_AF_MODE_OFF,
            ANDROID_CONTROL_AF_MODE_AUTO,
            ANDROID_CONTROL_AF_MODE_MACRO,
            ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
            ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE
    };

    static const uint8_t availableAfModesFront[] = {
            ANDROID_CONTROL_AF_MODE_OFF
    };

    if (mFacingBack) {
        ADD_OR_SIZE(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                    availableAfModesBack, sizeof(availableAfModesBack));
    } else {
        ADD_OR_SIZE(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                    availableAfModesFront, sizeof(availableAfModesFront));
    }

    static const uint8_t availableVstabModes[] = {
            ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
    };
    ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
            availableVstabModes, sizeof(availableVstabModes));

#undef ADD_OR_SIZE
    /** Allocate metadata if sizing */
    if (sizeRequest) {
        ALOGV("Allocating %zu entries, %zu extra bytes for "
                "static camera info",
                entryCount, dataCount);
        *info = allocate_camera_metadata(entryCount, dataCount);
        if (*info == NULL) {
            ALOGE("Unable to allocate camera static info"
                    "(%zu entries, %zu bytes extra data)",
                    entryCount, dataCount);
            return NO_MEMORY;
        }
    }
    return OK;
}
2361
/*
 * Builds (or sizes) a default capture-request metadata buffer for the
 * given CAMERA2_TEMPLATE_* type.  Same two-pass contract as
 * constructStaticInfo(): call with sizeRequest=true to count and allocate
 * *request, then with sizeRequest=false to populate it.
 */
status_t EmulatedFakeCamera2::constructDefaultRequest(
        int request_template,
        camera_metadata_t **request,
        bool sizeRequest) const {

    size_t entryCount = 0;
    size_t dataCount = 0;
    status_t ret;

// Either appends one entry to *request or accumulates its size into
// entryCount/dataCount, depending on sizeRequest; returns early on error.
#define ADD_OR_SIZE( tag, data, count ) \
    if ( ( ret = addOrSize(*request, sizeRequest, &entryCount, &dataCount, \
            tag, data, count) ) != OK ) return ret

    /** android.request */

    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
    ADD_OR_SIZE(ANDROID_REQUEST_TYPE, &requestType, 1);

    static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
    ADD_OR_SIZE(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);

    static const int32_t id = 0;
    ADD_OR_SIZE(ANDROID_REQUEST_ID, &id, 1);

    static const int32_t frameCount = 0;
    ADD_OR_SIZE(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);

    // OUTPUT_STREAMS set by user
    // Reserve sizing headroom for the stream list the user will fill in.
    entryCount += 1;
    dataCount += 5; // TODO: Should be maximum stream number

    /** android.lens */

    static const float focusDistance = 0;
    ADD_OR_SIZE(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);

    static const float aperture = 2.8f;
    ADD_OR_SIZE(ANDROID_LENS_APERTURE, &aperture, 1);

    // NOTE(review): 5.0 mm here vs. 3.30 mm advertised in the static info;
    // confirm the mismatch is intentional.
    static const float focalLength = 5.0f;
    ADD_OR_SIZE(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);

    static const float filterDensity = 0;
    ADD_OR_SIZE(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);

    static const uint8_t opticalStabilizationMode =
            ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    ADD_OR_SIZE(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
            &opticalStabilizationMode, 1);

    // FOCUS_RANGE set only in frame

    /** android.sensor */

    static const int64_t exposureTime = 10 * MSEC;
    ADD_OR_SIZE(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);

    static const int64_t frameDuration = 33333333L; // 1/30 s
    ADD_OR_SIZE(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);

    static const int32_t sensitivity = 100;
    ADD_OR_SIZE(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);

    // TIMESTAMP set only in frame

    /** android.flash */

    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
    ADD_OR_SIZE(ANDROID_FLASH_MODE, &flashMode, 1);

    static const uint8_t flashPower = 10;
    ADD_OR_SIZE(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);

    static const int64_t firingTime = 0;
    ADD_OR_SIZE(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);

    /** Processing block modes */
    // Still/snapshot/ZSL templates get HIGH_QUALITY processing; preview,
    // record, and anything else default to FAST.
    uint8_t hotPixelMode = 0;
    uint8_t demosaicMode = 0;
    uint8_t noiseMode = 0;
    uint8_t shadingMode = 0;
    uint8_t colorMode = 0;
    uint8_t tonemapMode = 0;
    uint8_t edgeMode = 0;
    switch (request_template) {
      case CAMERA2_TEMPLATE_STILL_CAPTURE:
        // fall-through
      case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
        // fall-through
      case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
        hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
        demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
        noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
        shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
        colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
        tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
        edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
        break;
      case CAMERA2_TEMPLATE_PREVIEW:
        // fall-through
      case CAMERA2_TEMPLATE_VIDEO_RECORD:
        // fall-through
      default:
        hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
        demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
        noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        shadingMode = ANDROID_SHADING_MODE_FAST;
        colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
        tonemapMode = ANDROID_TONEMAP_MODE_FAST;
        edgeMode = ANDROID_EDGE_MODE_FAST;
        break;
    }
    ADD_OR_SIZE(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
    ADD_OR_SIZE(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
    ADD_OR_SIZE(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
    ADD_OR_SIZE(ANDROID_SHADING_MODE, &shadingMode, 1);
    ADD_OR_SIZE(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
    ADD_OR_SIZE(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
    ADD_OR_SIZE(ANDROID_EDGE_MODE, &edgeMode, 1);

    /** android.noise */
    static const uint8_t noiseStrength = 5;
    ADD_OR_SIZE(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1);

    /** android.color */
    // Identity 3x3 matrix: no color correction applied by default.
    static const float colorTransform[9] = {
        1.0f, 0.f, 0.f,
        0.f, 1.f, 0.f,
        0.f, 0.f, 1.f
    };
    ADD_OR_SIZE(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);

    /** android.tonemap */
    // Two-point linear curve (0,0) -> (1,1), shared by all three channels.
    static const float tonemapCurve[4] = {
        0.f, 0.f,
        1.f, 1.f
    };
    ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
    ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
    ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);

    /** android.edge */
    static const uint8_t edgeStrength = 5;
    ADD_OR_SIZE(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);

    /** android.scaler */
    // NOTE(review): only {left, top, width} are written (3 values, no
    // height) — confirm this matches what the request-processing code
    // expects for ANDROID_SCALER_CROP_REGION.
    static const int32_t cropRegion[3] = {
        0, 0, static_cast<int32_t>(mSensorWidth)
    };
    ADD_OR_SIZE(ANDROID_SCALER_CROP_REGION, cropRegion, 3);

    /** android.jpeg */
    static const int32_t jpegQuality = 80;
    ADD_OR_SIZE(ANDROID_JPEG_QUALITY, &jpegQuality, 1);

    static const int32_t thumbnailSize[2] = {
        640, 480
    };
    ADD_OR_SIZE(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);

    static const int32_t thumbnailQuality = 80;
    ADD_OR_SIZE(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);

    static const double gpsCoordinates[2] = {
        0, 0
    };
    ADD_OR_SIZE(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2);

    static const uint8_t gpsProcessingMethod[32] = "None";
    ADD_OR_SIZE(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);

    static const int64_t gpsTimestamp = 0;
    ADD_OR_SIZE(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);

    static const int32_t jpegOrientation = 0;
    ADD_OR_SIZE(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);

    /** android.stats */

    static const uint8_t faceDetectMode =
        ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    ADD_OR_SIZE(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);

    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
    ADD_OR_SIZE(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);

    static const uint8_t sharpnessMapMode =
        ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
    ADD_OR_SIZE(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);

    // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
    // sharpnessMap only in frames

    /** android.control */

    // Capture intent tracks the template type directly.
    uint8_t controlIntent = 0;
    switch (request_template) {
      case CAMERA2_TEMPLATE_PREVIEW:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
        break;
      case CAMERA2_TEMPLATE_STILL_CAPTURE:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
        break;
      case CAMERA2_TEMPLATE_VIDEO_RECORD:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
        break;
      case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
        break;
      case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
        break;
      default:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
        break;
    }
    ADD_OR_SIZE(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);

    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
    ADD_OR_SIZE(ANDROID_CONTROL_MODE, &controlMode, 1);

    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
    ADD_OR_SIZE(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

    // NOTE(review): FACE_PRIORITY is not in the availableSceneModes list
    // advertised by constructStaticInfo (only DISABLED) — confirm this
    // default is intentional.
    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
    ADD_OR_SIZE(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
    ADD_OR_SIZE(ANDROID_CONTROL_AE_MODE, &aeMode, 1);

    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
    ADD_OR_SIZE(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);

    // Single metering region covering the whole sensor, weight 1000;
    // shared below by the AE, AWB, and AF region tags.
    static const int32_t controlRegions[5] = {
        0, 0,
        static_cast<int32_t>(mSensorWidth),
        static_cast<int32_t>(mSensorHeight),
        1000
    };
    ADD_OR_SIZE(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);

    static const int32_t aeExpCompensation = 0;
    ADD_OR_SIZE(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);

    static const int32_t aeTargetFpsRange[2] = {
        10, 30
    };
    ADD_OR_SIZE(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);

    static const uint8_t aeAntibandingMode =
            ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
    ADD_OR_SIZE(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);

    static const uint8_t awbMode =
            ANDROID_CONTROL_AWB_MODE_AUTO;
    ADD_OR_SIZE(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
    ADD_OR_SIZE(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

    ADD_OR_SIZE(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5);

    // AF mode per template: auto for preview/still, continuous for video
    // templates, continuous-picture for ZSL.
    uint8_t afMode = 0;
    switch (request_template) {
      case CAMERA2_TEMPLATE_PREVIEW:
        afMode = ANDROID_CONTROL_AF_MODE_AUTO;
        break;
      case CAMERA2_TEMPLATE_STILL_CAPTURE:
        afMode = ANDROID_CONTROL_AF_MODE_AUTO;
        break;
      case CAMERA2_TEMPLATE_VIDEO_RECORD:
        afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
        break;
      case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
        afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
        break;
      case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
        afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        break;
      default:
        afMode = ANDROID_CONTROL_AF_MODE_AUTO;
        break;
    }
    ADD_OR_SIZE(ANDROID_CONTROL_AF_MODE, &afMode, 1);

    ADD_OR_SIZE(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);

    static const uint8_t vstabMode =
        ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    ADD_OR_SIZE(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);

    // aeState, awbState, afState only in frame

    /** Allocate metadata if sizing */
    if (sizeRequest) {
        ALOGV("Allocating %zu entries, %zu extra bytes for "
                "request template type %d",
                entryCount, dataCount, request_template);
        *request = allocate_camera_metadata(entryCount, dataCount);
        if (*request == NULL) {
            ALOGE("Unable to allocate new request template type %d "
                    "(%zu entries, %zu bytes extra data)", request_template,
                    entryCount, dataCount);
            return NO_MEMORY;
        }
    }
    return OK;
#undef ADD_OR_SIZE
}
2671
addOrSize(camera_metadata_t * request,bool sizeRequest,size_t * entryCount,size_t * dataCount,uint32_t tag,const void * entryData,size_t entryDataCount)2672 status_t EmulatedFakeCamera2::addOrSize(camera_metadata_t *request,
2673 bool sizeRequest,
2674 size_t *entryCount,
2675 size_t *dataCount,
2676 uint32_t tag,
2677 const void *entryData,
2678 size_t entryDataCount) {
2679 if (!sizeRequest) {
2680 return add_camera_metadata_entry(request, tag, entryData,
2681 entryDataCount);
2682 } else {
2683 int type = get_camera_metadata_tag_type(tag);
2684 if (type < 0 ) return BAD_VALUE;
2685 (*entryCount)++;
2686 (*dataCount) += calculate_camera_metadata_entry_data_size(type,
2687 entryDataCount);
2688 return OK;
2689 }
2690 }
2691
isStreamInUse(uint32_t id)2692 bool EmulatedFakeCamera2::isStreamInUse(uint32_t id) {
2693 // Assumes mMutex is locked; otherwise new requests could enter
2694 // configureThread while readoutThread is being checked
2695
2696 // Order of isStreamInUse calls matters
2697 if (mConfigureThread->isStreamInUse(id) ||
2698 mReadoutThread->isStreamInUse(id) ||
2699 mJpegCompressor->isStreamInUse(id) ) {
2700 ALOGE("%s: Stream %d is in use in active requests!",
2701 __FUNCTION__, id);
2702 return true;
2703 }
2704 return false;
2705 }
2706
// Whether any active request still references reprocess stream `id`.
// Stub: reprocess-stream usage is not tracked yet, so this always reports
// the stream as unused.
bool EmulatedFakeCamera2::isReprocessStreamInUse(uint32_t id) {
    // TODO: implement
    return false;
}
2711
// Looks up the Stream record for `streamId` under the main lock.
// NOTE(review): the returned reference points into mStreams and outlives
// the lock — callers must ensure the stream is not removed concurrently.
const Stream& EmulatedFakeCamera2::getStreamInfo(uint32_t streamId) {
    Mutex::Autolock lock(mMutex);

    return mStreams.valueFor(streamId);
}
2717
// Looks up the ReprocessStream record for `streamId` under the main lock.
// NOTE(review): same lifetime caveat as getStreamInfo — the reference
// outlives the lock.
const ReprocessStream& EmulatedFakeCamera2::getReprocessStreamInfo(uint32_t streamId) {
    Mutex::Autolock lock(mMutex);

    return mReprocessStreams.valueFor(streamId);
}
2723
2724 }; /* namespace android */
2725