1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 /*
18 * Contains implementation of a class EmulatedFakeCamera2 that encapsulates
19 * functionality of an advanced fake camera.
20 */
21
22 //#define LOG_NDEBUG 0
23 #define LOG_TAG "EmulatedCamera_FakeCamera2"
24 #include <utils/Log.h>
25
26 #include "EmulatedFakeCamera2.h"
27 #include "EmulatedCameraFactory.h"
28 #include <ui/Rect.h>
29 #include <ui/GraphicBufferMapper.h>
30 #include "gralloc_cb.h"
31
32 namespace android {
33
// Time-unit multipliers expressed in nanoseconds — the base unit the
// camera HAL uses for all timestamps and durations.
const int64_t USEC = 1000LL;        // ns per microsecond
const int64_t MSEC = USEC * 1000LL; // ns per millisecond
const int64_t SEC = MSEC * 1000LL;  // ns per second

// Output pixel formats this fake camera advertises. Sized [4]: YV12 is
// intentionally commented out.
const uint32_t EmulatedFakeCamera2::kAvailableFormats[4] = {
        HAL_PIXEL_FORMAT_RAW_SENSOR,
        HAL_PIXEL_FORMAT_BLOB,
        HAL_PIXEL_FORMAT_RGBA_8888,
        //        HAL_PIXEL_FORMAT_YV12,
        HAL_PIXEL_FORMAT_YCrCb_420_SP
};

// Supported raw-sensor output sizes, stored as a flat list of
// interleaved (width, height) pairs — one resolution here.
const uint32_t EmulatedFakeCamera2::kAvailableRawSizes[2] = {
    640, 480
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};

// Minimum frame duration for raw output (ns).
const uint64_t EmulatedFakeCamera2::kAvailableRawMinDurations[1] = {
    Sensor::kFrameDurationRange[0]
};

// Processed (YUV/RGB) sizes for the back camera, flat (w, h) pairs.
const uint32_t EmulatedFakeCamera2::kAvailableProcessedSizesBack[4] = {
    640, 480, 320, 240
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};

// Processed (YUV/RGB) sizes for the front camera, flat (w, h) pairs.
const uint32_t EmulatedFakeCamera2::kAvailableProcessedSizesFront[4] = {
    320, 240, 160, 120
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};

// Minimum frame duration for processed output (ns).
const uint64_t EmulatedFakeCamera2::kAvailableProcessedMinDurations[1] = {
    Sensor::kFrameDurationRange[0]
};

// JPEG (BLOB) sizes for the back camera, flat (w, h) pairs.
const uint32_t EmulatedFakeCamera2::kAvailableJpegSizesBack[2] = {
    640, 480
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};

// JPEG (BLOB) sizes for the front camera, flat (w, h) pairs.
const uint32_t EmulatedFakeCamera2::kAvailableJpegSizesFront[2] = {
    320, 240
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};


// Minimum frame duration for JPEG output (ns).
const uint64_t EmulatedFakeCamera2::kAvailableJpegMinDurations[1] = {
    Sensor::kFrameDurationRange[0]
};
83
84
EmulatedFakeCamera2(int cameraId,bool facingBack,struct hw_module_t * module)85 EmulatedFakeCamera2::EmulatedFakeCamera2(int cameraId,
86 bool facingBack,
87 struct hw_module_t* module)
88 : EmulatedCamera2(cameraId,module),
89 mFacingBack(facingBack)
90 {
91 ALOGD("Constructing emulated fake camera 2 facing %s",
92 facingBack ? "back" : "front");
93 }
94
~EmulatedFakeCamera2()95 EmulatedFakeCamera2::~EmulatedFakeCamera2() {
96 if (mCameraInfo != NULL) {
97 free_camera_metadata(mCameraInfo);
98 }
99 }
100
101 /****************************************************************************
102 * Public API overrides
103 ***************************************************************************/
104
Initialize()105 status_t EmulatedFakeCamera2::Initialize() {
106 status_t res;
107
108 set_camera_metadata_vendor_tag_ops(
109 static_cast<vendor_tag_query_ops_t*>(&mVendorTagOps));
110
111 res = constructStaticInfo(&mCameraInfo, true);
112 if (res != OK) {
113 ALOGE("%s: Unable to allocate static info: %s (%d)",
114 __FUNCTION__, strerror(-res), res);
115 return res;
116 }
117 res = constructStaticInfo(&mCameraInfo, false);
118 if (res != OK) {
119 ALOGE("%s: Unable to fill in static info: %s (%d)",
120 __FUNCTION__, strerror(-res), res);
121 return res;
122 }
123 if (res != OK) return res;
124
125 mNextStreamId = 1;
126 mNextReprocessStreamId = 1;
127 mRawStreamCount = 0;
128 mProcessedStreamCount = 0;
129 mJpegStreamCount = 0;
130 mReprocessStreamCount = 0;
131
132 return NO_ERROR;
133 }
134
135 /****************************************************************************
136 * Camera module API overrides
137 ***************************************************************************/
138
// Opens the camera: constructs the per-connection pipeline objects,
// starts the simulated sensor, launches the worker threads in pipeline
// order, and finally lets the base class fill in the device handle.
// Returns the first failing step's error code.
status_t EmulatedFakeCamera2::connectCamera(hw_device_t** device) {
    status_t res;
    ALOGV("%s", __FUNCTION__);

    // Fresh pipeline per connection: request configuration, frame
    // readout, 3A control, the simulated sensor, and the JPEG encoder.
    mConfigureThread = new ConfigureThread(this);
    mReadoutThread = new ReadoutThread(this);
    mControlThread = new ControlThread(this);
    mSensor = new Sensor(this);
    mJpegCompressor = new JpegCompressor(this);

    // Stream id 0 is reserved as invalid.
    mNextStreamId = 1;
    mNextReprocessStreamId = 1;

    // Sensor must be running before the threads that drive/consume it.
    res = mSensor->startUp();
    if (res != NO_ERROR) return res;

    res = mConfigureThread->run("EmulatedFakeCamera2::configureThread");
    if (res != NO_ERROR) return res;

    res = mReadoutThread->run("EmulatedFakeCamera2::readoutThread");
    if (res != NO_ERROR) return res;

    res = mControlThread->run("EmulatedFakeCamera2::controlThread");
    if (res != NO_ERROR) return res;

    return EmulatedCamera2::connectCamera(device);
}
166
// Closes the camera: stops the sensor first, then asks every worker
// thread to exit, cancels any in-flight JPEG job (so threads blocked on
// the compressor wake up), and joins the threads before returning.
status_t EmulatedFakeCamera2::closeCamera() {
    Mutex::Autolock l(mMutex);

    status_t res;
    ALOGV("%s", __FUNCTION__);

    res = mSensor->shutDown();
    if (res != NO_ERROR) {
        ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
        return res;
    }

    // Request exits before joining; cancel() unblocks waiters on the
    // JPEG compressor so the joins below cannot deadlock.
    mConfigureThread->requestExit();
    mReadoutThread->requestExit();
    mControlThread->requestExit();
    mJpegCompressor->cancel();

    mConfigureThread->join();
    mReadoutThread->join();
    mControlThread->join();

    ALOGV("%s exit", __FUNCTION__);
    return NO_ERROR;
}
191
getCameraInfo(struct camera_info * info)192 status_t EmulatedFakeCamera2::getCameraInfo(struct camera_info *info) {
193 info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
194 info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation();
195 return EmulatedCamera2::getCameraInfo(info);
196 }
197
198 /****************************************************************************
199 * Camera device API overrides
200 ***************************************************************************/
201
202 /** Request input queue */
203
requestQueueNotify()204 int EmulatedFakeCamera2::requestQueueNotify() {
205 ALOGV("Request queue notification received");
206
207 ALOG_ASSERT(mRequestQueueSrc != NULL,
208 "%s: Request queue src not set, but received queue notification!",
209 __FUNCTION__);
210 ALOG_ASSERT(mFrameQueueDst != NULL,
211 "%s: Request queue src not set, but received queue notification!",
212 __FUNCTION__);
213 ALOG_ASSERT(mStreams.size() != 0,
214 "%s: No streams allocated, but received queue notification!",
215 __FUNCTION__);
216 return mConfigureThread->newRequestAvailable();
217 }
218
getInProgressCount()219 int EmulatedFakeCamera2::getInProgressCount() {
220 Mutex::Autolock l(mMutex);
221
222 int requestCount = 0;
223 requestCount += mConfigureThread->getInProgressCount();
224 requestCount += mReadoutThread->getInProgressCount();
225 requestCount += mJpegCompressor->isBusy() ? 1 : 0;
226
227 return requestCount;
228 }
229
constructDefaultRequest(int request_template,camera_metadata_t ** request)230 int EmulatedFakeCamera2::constructDefaultRequest(
231 int request_template,
232 camera_metadata_t **request) {
233
234 if (request == NULL) return BAD_VALUE;
235 if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
236 return BAD_VALUE;
237 }
238 status_t res;
239 // Pass 1, calculate size and allocate
240 res = constructDefaultRequest(request_template,
241 request,
242 true);
243 if (res != OK) {
244 return res;
245 }
246 // Pass 2, build request
247 res = constructDefaultRequest(request_template,
248 request,
249 false);
250 if (res != OK) {
251 ALOGE("Unable to populate new request for template %d",
252 request_template);
253 }
254
255 return res;
256 }
257
allocateStream(uint32_t width,uint32_t height,int format,const camera2_stream_ops_t * stream_ops,uint32_t * stream_id,uint32_t * format_actual,uint32_t * usage,uint32_t * max_buffers)258 int EmulatedFakeCamera2::allocateStream(
259 uint32_t width,
260 uint32_t height,
261 int format,
262 const camera2_stream_ops_t *stream_ops,
263 uint32_t *stream_id,
264 uint32_t *format_actual,
265 uint32_t *usage,
266 uint32_t *max_buffers) {
267 Mutex::Autolock l(mMutex);
268
269 // Temporary shim until FORMAT_ZSL is removed
270 if (format == CAMERA2_HAL_PIXEL_FORMAT_ZSL) {
271 format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
272 }
273
274 if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
275 unsigned int numFormats = sizeof(kAvailableFormats) / sizeof(uint32_t);
276 unsigned int formatIdx = 0;
277 unsigned int sizeOffsetIdx = 0;
278 for (; formatIdx < numFormats; formatIdx++) {
279 if (format == (int)kAvailableFormats[formatIdx]) break;
280 }
281 if (formatIdx == numFormats) {
282 ALOGE("%s: Format 0x%x is not supported", __FUNCTION__, format);
283 return BAD_VALUE;
284 }
285 }
286
287 const uint32_t *availableSizes;
288 size_t availableSizeCount;
289 switch (format) {
290 case HAL_PIXEL_FORMAT_RAW_SENSOR:
291 availableSizes = kAvailableRawSizes;
292 availableSizeCount = sizeof(kAvailableRawSizes)/sizeof(uint32_t);
293 break;
294 case HAL_PIXEL_FORMAT_BLOB:
295 availableSizes = mFacingBack ?
296 kAvailableJpegSizesBack : kAvailableJpegSizesFront;
297 availableSizeCount = mFacingBack ?
298 sizeof(kAvailableJpegSizesBack)/sizeof(uint32_t) :
299 sizeof(kAvailableJpegSizesFront)/sizeof(uint32_t);
300 break;
301 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
302 case HAL_PIXEL_FORMAT_RGBA_8888:
303 case HAL_PIXEL_FORMAT_YV12:
304 case HAL_PIXEL_FORMAT_YCrCb_420_SP:
305 availableSizes = mFacingBack ?
306 kAvailableProcessedSizesBack : kAvailableProcessedSizesFront;
307 availableSizeCount = mFacingBack ?
308 sizeof(kAvailableProcessedSizesBack)/sizeof(uint32_t) :
309 sizeof(kAvailableProcessedSizesFront)/sizeof(uint32_t);
310 break;
311 default:
312 ALOGE("%s: Unknown format 0x%x", __FUNCTION__, format);
313 return BAD_VALUE;
314 }
315
316 unsigned int resIdx = 0;
317 for (; resIdx < availableSizeCount; resIdx++) {
318 if (availableSizes[resIdx * 2] == width &&
319 availableSizes[resIdx * 2 + 1] == height) break;
320 }
321 if (resIdx == availableSizeCount) {
322 ALOGE("%s: Format 0x%x does not support resolution %d, %d", __FUNCTION__,
323 format, width, height);
324 return BAD_VALUE;
325 }
326
327 switch (format) {
328 case HAL_PIXEL_FORMAT_RAW_SENSOR:
329 if (mRawStreamCount >= kMaxRawStreamCount) {
330 ALOGE("%s: Cannot allocate another raw stream (%d already allocated)",
331 __FUNCTION__, mRawStreamCount);
332 return INVALID_OPERATION;
333 }
334 mRawStreamCount++;
335 break;
336 case HAL_PIXEL_FORMAT_BLOB:
337 if (mJpegStreamCount >= kMaxJpegStreamCount) {
338 ALOGE("%s: Cannot allocate another JPEG stream (%d already allocated)",
339 __FUNCTION__, mJpegStreamCount);
340 return INVALID_OPERATION;
341 }
342 mJpegStreamCount++;
343 break;
344 default:
345 if (mProcessedStreamCount >= kMaxProcessedStreamCount) {
346 ALOGE("%s: Cannot allocate another processed stream (%d already allocated)",
347 __FUNCTION__, mProcessedStreamCount);
348 return INVALID_OPERATION;
349 }
350 mProcessedStreamCount++;
351 }
352
353 Stream newStream;
354 newStream.ops = stream_ops;
355 newStream.width = width;
356 newStream.height = height;
357 newStream.format = format;
358 // TODO: Query stride from gralloc
359 newStream.stride = width;
360
361 mStreams.add(mNextStreamId, newStream);
362
363 *stream_id = mNextStreamId;
364 if (format_actual) *format_actual = format;
365 *usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
366 *max_buffers = kMaxBufferCount;
367
368 ALOGV("Stream allocated: %d, %d x %d, 0x%x. U: %x, B: %d",
369 *stream_id, width, height, format, *usage, *max_buffers);
370
371 mNextStreamId++;
372 return NO_ERROR;
373 }
374
// Records the concrete pixel format for a stream once the framework has
// allocated its buffers. Streams created as IMPLEMENTATION_DEFINED only
// get a real format here, read from the first buffer's gralloc handle.
// Assumes all buffers in the set share one format.
int EmulatedFakeCamera2::registerStreamBuffers(
        uint32_t stream_id,
        int num_buffers,
        buffer_handle_t *buffers) {
    Mutex::Autolock l(mMutex);

    ALOGV("%s: Stream %d registering %d buffers", __FUNCTION__,
            stream_id, num_buffers);
    // Need to find out what the final concrete pixel format for our stream is
    // Assumes that all buffers have the same format.
    if (num_buffers < 1) {
        ALOGE("%s: Stream %d only has %d buffers!",
                __FUNCTION__, stream_id, num_buffers);
        return BAD_VALUE;
    }
    // Emulator gralloc buffers are cb_handle_t; peek at the first one to
    // learn the resolved format. (Assumes the emulator gralloc module —
    // the cast is not validated.)
    const cb_handle_t *streamBuffer =
            reinterpret_cast<const cb_handle_t*>(buffers[0]);

    int finalFormat = streamBuffer->format;

    if (finalFormat == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        ALOGE("%s: Stream %d: Bad final pixel format "
                "HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; "
                "concrete pixel format required!", __FUNCTION__, stream_id);
        return BAD_VALUE;
    }

    ssize_t streamIndex = mStreams.indexOfKey(stream_id);
    if (streamIndex < 0) {
        ALOGE("%s: Unknown stream id %d!", __FUNCTION__, stream_id);
        return BAD_VALUE;
    }

    Stream &stream = mStreams.editValueAt(streamIndex);

    ALOGV("%s: Stream %d format set to %x, previously %x",
            __FUNCTION__, stream_id, finalFormat, stream.format);

    stream.format = finalFormat;

    return NO_ERROR;
}
417
releaseStream(uint32_t stream_id)418 int EmulatedFakeCamera2::releaseStream(uint32_t stream_id) {
419 Mutex::Autolock l(mMutex);
420
421 ssize_t streamIndex = mStreams.indexOfKey(stream_id);
422 if (streamIndex < 0) {
423 ALOGE("%s: Unknown stream id %d!", __FUNCTION__, stream_id);
424 return BAD_VALUE;
425 }
426
427 if (isStreamInUse(stream_id)) {
428 ALOGE("%s: Cannot release stream %d; in use!", __FUNCTION__,
429 stream_id);
430 return BAD_VALUE;
431 }
432
433 switch(mStreams.valueAt(streamIndex).format) {
434 case HAL_PIXEL_FORMAT_RAW_SENSOR:
435 mRawStreamCount--;
436 break;
437 case HAL_PIXEL_FORMAT_BLOB:
438 mJpegStreamCount--;
439 break;
440 default:
441 mProcessedStreamCount--;
442 break;
443 }
444
445 mStreams.removeItemsAt(streamIndex);
446
447 return NO_ERROR;
448 }
449
allocateReprocessStreamFromStream(uint32_t output_stream_id,const camera2_stream_in_ops_t * stream_ops,uint32_t * stream_id)450 int EmulatedFakeCamera2::allocateReprocessStreamFromStream(
451 uint32_t output_stream_id,
452 const camera2_stream_in_ops_t *stream_ops,
453 uint32_t *stream_id) {
454 Mutex::Autolock l(mMutex);
455
456 ssize_t baseStreamIndex = mStreams.indexOfKey(output_stream_id);
457 if (baseStreamIndex < 0) {
458 ALOGE("%s: Unknown output stream id %d!", __FUNCTION__, output_stream_id);
459 return BAD_VALUE;
460 }
461
462 const Stream &baseStream = mStreams[baseStreamIndex];
463
464 // We'll reprocess anything we produced
465
466 if (mReprocessStreamCount >= kMaxReprocessStreamCount) {
467 ALOGE("%s: Cannot allocate another reprocess stream (%d already allocated)",
468 __FUNCTION__, mReprocessStreamCount);
469 return INVALID_OPERATION;
470 }
471 mReprocessStreamCount++;
472
473 ReprocessStream newStream;
474 newStream.ops = stream_ops;
475 newStream.width = baseStream.width;
476 newStream.height = baseStream.height;
477 newStream.format = baseStream.format;
478 newStream.stride = baseStream.stride;
479 newStream.sourceStreamId = output_stream_id;
480
481 *stream_id = mNextReprocessStreamId;
482 mReprocessStreams.add(mNextReprocessStreamId, newStream);
483
484 ALOGV("Reprocess stream allocated: %d: %d, %d, 0x%x. Parent stream: %d",
485 *stream_id, newStream.width, newStream.height, newStream.format,
486 output_stream_id);
487
488 mNextReprocessStreamId++;
489 return NO_ERROR;
490 }
491
releaseReprocessStream(uint32_t stream_id)492 int EmulatedFakeCamera2::releaseReprocessStream(uint32_t stream_id) {
493 Mutex::Autolock l(mMutex);
494
495 ssize_t streamIndex = mReprocessStreams.indexOfKey(stream_id);
496 if (streamIndex < 0) {
497 ALOGE("%s: Unknown reprocess stream id %d!", __FUNCTION__, stream_id);
498 return BAD_VALUE;
499 }
500
501 if (isReprocessStreamInUse(stream_id)) {
502 ALOGE("%s: Cannot release reprocessing stream %d; in use!", __FUNCTION__,
503 stream_id);
504 return BAD_VALUE;
505 }
506
507 mReprocessStreamCount--;
508 mReprocessStreams.removeItemsAt(streamIndex);
509
510 return NO_ERROR;
511 }
512
triggerAction(uint32_t trigger_id,int32_t ext1,int32_t ext2)513 int EmulatedFakeCamera2::triggerAction(uint32_t trigger_id,
514 int32_t ext1,
515 int32_t ext2) {
516 Mutex::Autolock l(mMutex);
517 return mControlThread->triggerAction(trigger_id,
518 ext1, ext2);
519 }
520
521 /** Custom tag definitions */
522
523 // Emulator camera metadata sections
// Emulator camera metadata sections
enum {
    EMULATOR_SCENE = VENDOR_SECTION,
    END_EMULATOR_SECTIONS
};

// First tag id in each vendor section (section id lives in the top 16
// bits of a metadata tag).
enum {
    EMULATOR_SCENE_START = EMULATOR_SCENE << 16,
};

// Emulator camera metadata tags
enum {
    // Hour of day to use for lighting calculations (0-23). Default: 12
    EMULATOR_SCENE_HOUROFDAY = EMULATOR_SCENE_START,
    EMULATOR_SCENE_END
};

// [start, end) tag-id bounds per vendor section, indexed by
// (section - VENDOR_SECTION). Used by the getVendorTag* queries below.
unsigned int emulator_metadata_section_bounds[END_EMULATOR_SECTIONS -
        VENDOR_SECTION][2] = {
    { EMULATOR_SCENE_START, EMULATOR_SCENE_END }
};

// Fully-qualified section names, same indexing as the bounds table.
const char *emulator_metadata_section_names[END_EMULATOR_SECTIONS -
        VENDOR_SECTION] = {
    "com.android.emulator.scene"
};

// Per-tag name and metadata wire type.
typedef struct emulator_tag_info {
    const char *tag_name;
    uint8_t tag_type;
} emulator_tag_info_t;

// Tag table for the EMULATOR_SCENE section, indexed by (tag & 0xFFFF).
emulator_tag_info_t emulator_scene[EMULATOR_SCENE_END - EMULATOR_SCENE_START] = {
    { "hourOfDay", TYPE_INT32 }
};

// One tag table per vendor section, indexed by (section - VENDOR_SECTION).
emulator_tag_info_t *tag_info[END_EMULATOR_SECTIONS -
        VENDOR_SECTION] = {
    emulator_scene
};
563
getVendorSectionName(uint32_t tag)564 const char* EmulatedFakeCamera2::getVendorSectionName(uint32_t tag) {
565 ALOGV("%s", __FUNCTION__);
566 uint32_t section = tag >> 16;
567 if (section < VENDOR_SECTION || section > END_EMULATOR_SECTIONS) return NULL;
568 return emulator_metadata_section_names[section - VENDOR_SECTION];
569 }
570
getVendorTagName(uint32_t tag)571 const char* EmulatedFakeCamera2::getVendorTagName(uint32_t tag) {
572 ALOGV("%s", __FUNCTION__);
573 uint32_t section = tag >> 16;
574 if (section < VENDOR_SECTION || section > END_EMULATOR_SECTIONS) return NULL;
575 uint32_t section_index = section - VENDOR_SECTION;
576 if (tag >= emulator_metadata_section_bounds[section_index][1]) {
577 return NULL;
578 }
579 uint32_t tag_index = tag & 0xFFFF;
580 return tag_info[section_index][tag_index].tag_name;
581 }
582
getVendorTagType(uint32_t tag)583 int EmulatedFakeCamera2::getVendorTagType(uint32_t tag) {
584 ALOGV("%s", __FUNCTION__);
585 uint32_t section = tag >> 16;
586 if (section < VENDOR_SECTION || section > END_EMULATOR_SECTIONS) return -1;
587 uint32_t section_index = section - VENDOR_SECTION;
588 if (tag >= emulator_metadata_section_bounds[section_index][1]) {
589 return -1;
590 }
591 uint32_t tag_index = tag & 0xFFFF;
592 return tag_info[section_index][tag_index].tag_type;
593 }
594
595 /** Shutdown and debug methods */
596
dump(int fd)597 int EmulatedFakeCamera2::dump(int fd) {
598 String8 result;
599
600 result.appendFormat(" Camera HAL device: EmulatedFakeCamera2\n");
601 result.appendFormat(" Streams:\n");
602 for (size_t i = 0; i < mStreams.size(); i++) {
603 int id = mStreams.keyAt(i);
604 const Stream& s = mStreams.valueAt(i);
605 result.appendFormat(
606 " Stream %d: %d x %d, format 0x%x, stride %d\n",
607 id, s.width, s.height, s.format, s.stride);
608 }
609
610 write(fd, result.string(), result.size());
611
612 return NO_ERROR;
613 }
614
// Invoked by worker threads when they hit an unrecoverable error; the
// thread returns false from its loop afterwards, which stops it.
void EmulatedFakeCamera2::signalError() {
    // TODO: Let parent know so we can shut down cleanly
    ALOGE("Worker thread is signaling a serious error");
}
619
620 /** Pipeline control worker thread methods */
621
ConfigureThread(EmulatedFakeCamera2 * parent)622 EmulatedFakeCamera2::ConfigureThread::ConfigureThread(EmulatedFakeCamera2 *parent):
623 Thread(false),
624 mParent(parent),
625 mRequestCount(0),
626 mNextBuffers(NULL) {
627 mRunning = false;
628 }
629
// No directly-owned heap state; per-frame buffers are handed off to the
// readout thread before the pointers here are cleared.
EmulatedFakeCamera2::ConfigureThread::~ConfigureThread() {
}
632
readyToRun()633 status_t EmulatedFakeCamera2::ConfigureThread::readyToRun() {
634 Mutex::Autolock lock(mInputMutex);
635
636 ALOGV("Starting up ConfigureThread");
637 mRequest = NULL;
638 mActive = false;
639 mRunning = true;
640
641 mInputSignal.signal();
642 return NO_ERROR;
643 }
644
waitUntilRunning()645 status_t EmulatedFakeCamera2::ConfigureThread::waitUntilRunning() {
646 Mutex::Autolock lock(mInputMutex);
647 if (!mRunning) {
648 ALOGV("Waiting for configure thread to start");
649 mInputSignal.wait(mInputMutex);
650 }
651 return OK;
652 }
653
// Called from requestQueueNotify() when the framework queues a request.
// Waits for the thread to be up (outside the lock, to avoid deadlocking
// with readyToRun), then marks the thread active and wakes threadLoop().
status_t EmulatedFakeCamera2::ConfigureThread::newRequestAvailable() {
    waitUntilRunning();

    Mutex::Autolock lock(mInputMutex);

    mActive = true;
    mInputSignal.signal();

    return OK;
}
664
isStreamInUse(uint32_t id)665 bool EmulatedFakeCamera2::ConfigureThread::isStreamInUse(uint32_t id) {
666 Mutex::Autolock lock(mInternalsMutex);
667
668 if (mNextBuffers == NULL) return false;
669 for (size_t i=0; i < mNextBuffers->size(); i++) {
670 if ((*mNextBuffers)[i].streamId == (int)id) return true;
671 }
672 return false;
673 }
674
getInProgressCount()675 int EmulatedFakeCamera2::ConfigureThread::getInProgressCount() {
676 Mutex::Autolock lock(mInputMutex);
677 return mRequestCount;
678 }
679
// Main loop: (1) idle-wait until activated; (2) dequeue a request and
// parse it as capture or reprocess; (3) wait for the downstream stages
// (readout queue space, JPEG compressor) the request needs; (4) commit
// the configuration. Returns true to loop again, false on fatal error.
// State (mRequest/mWaitingForReadout/mNextNeedsJpeg) is carried across
// iterations, so each wait is bounded (kWaitPerLoop) and re-entered.
bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
    status_t res;

    // Check if we're currently processing or just waiting
    {
        Mutex::Autolock lock(mInputMutex);
        if (!mActive) {
            // Inactive, keep waiting until we've been signaled
            status_t res;
            res = mInputSignal.waitRelative(mInputMutex, kWaitPerLoop);
            if (res != NO_ERROR && res != TIMED_OUT) {
                ALOGE("%s: Error waiting for input requests: %d",
                        __FUNCTION__, res);
                return false;
            }
            // Timed out without activation: loop again and re-wait.
            if (!mActive) return true;
            ALOGV("New request available");
        }
        // Active
    }
    // No request in flight yet: pull the next one off the framework queue.
    if (mRequest == NULL) {
        Mutex::Autolock il(mInternalsMutex);

        ALOGV("Configure: Getting next request");
        res = mParent->mRequestQueueSrc->dequeue_request(
            mParent->mRequestQueueSrc,
            &mRequest);
        if (res != NO_ERROR) {
            ALOGE("%s: Error dequeuing next request: %d", __FUNCTION__, res);
            mParent->signalError();
            return false;
        }
        if (mRequest == NULL) {
            ALOGV("Configure: Request queue empty, going inactive");
            // No requests available, go into inactive mode
            Mutex::Autolock lock(mInputMutex);
            mActive = false;
            return true;
        } else {
            // Count the request as in-progress until handed downstream.
            Mutex::Autolock lock(mInputMutex);
            mRequestCount++;
        }

        // Dispatch on the request type tag: capture vs reprocess.
        camera_metadata_entry_t type;
        res = find_camera_metadata_entry(mRequest,
                ANDROID_REQUEST_TYPE,
                &type);
        if (res != NO_ERROR) {
            ALOGE("%s: error reading request type", __FUNCTION__);
            mParent->signalError();
            return false;
        }
        bool success = false;;
        switch (type.data.u8[0]) {
            case ANDROID_REQUEST_TYPE_CAPTURE:
                success = setupCapture();
                break;
            case ANDROID_REQUEST_TYPE_REPROCESS:
                success = setupReprocess();
                break;
            default:
                ALOGE("%s: Unexpected request type %d",
                        __FUNCTION__, type.data.u8[0]);
                mParent->signalError();
                break;
        }
        if (!success) return false;

    }

    // Wait (bounded) for the readout thread to have queue space.
    if (mWaitingForReadout) {
        bool readoutDone;
        readoutDone = mParent->mReadoutThread->waitForReady(kWaitPerLoop);
        if (!readoutDone) return true;

        if (mNextNeedsJpeg) {
            ALOGV("Configure: Waiting for JPEG compressor");
        } else {
            ALOGV("Configure: Waiting for sensor");
        }
        mWaitingForReadout = false;
    }

    // If this frame produces a JPEG, the single compressor must be idle.
    if (mNextNeedsJpeg) {
        bool jpegDone;
        jpegDone = mParent->mJpegCompressor->waitForDone(kWaitPerLoop);
        if (!jpegDone) return true;

        ALOGV("Configure: Waiting for sensor");
        mNextNeedsJpeg = false;
    }

    // All gates passed: commit sensor config / buffer handoff.
    if (mNextIsCapture) {
        return configureNextCapture();
    } else {
        return configureNextReprocess();
    }
}
778
// Parses a capture request: hands it to the control thread for 3A,
// builds the destination buffer list from the output-streams tag, and
// caches frame number, exposure, duration and sensitivity for the
// commit in configureNextCapture(). Returns false (after signalError)
// if any required metadata entry is missing or a stream is unresolved.
bool EmulatedFakeCamera2::ConfigureThread::setupCapture() {
    status_t res;

    mNextIsCapture = true;
    // Get necessary parameters for sensor config
    mParent->mControlThread->processRequest(mRequest);

    camera_metadata_entry_t streams;
    res = find_camera_metadata_entry(mRequest,
            ANDROID_REQUEST_OUTPUT_STREAMS,
            &streams);
    if (res != NO_ERROR) {
        ALOGE("%s: error reading output stream tag", __FUNCTION__);
        mParent->signalError();
        return false;
    }

    mNextBuffers = new Buffers;
    mNextNeedsJpeg = false;
    ALOGV("Configure: Setting up buffers for capture");
    for (size_t i = 0; i < streams.count; i++) {
        // Stream ids are stored as u8 entries in the request metadata.
        int streamId = streams.data.u8[i];
        const Stream &s = mParent->getStreamInfo(streamId);
        // Streams must have a concrete format by now (set during
        // registerStreamBuffers).
        if (s.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
            ALOGE("%s: Stream %d does not have a concrete pixel format, but "
                    "is included in a request!", __FUNCTION__, streamId);
            mParent->signalError();
            return false;
        }
        StreamBuffer b;
        b.streamId = streams.data.u8[i];
        b.width  = s.width;
        b.height = s.height;
        b.format = s.format;
        b.stride = s.stride;
        mNextBuffers->push_back(b);
        ALOGV("Configure:    Buffer %d: Stream %d, %d x %d, format 0x%x, "
                "stride %d",
                i, b.streamId, b.width, b.height, b.format, b.stride);
        // A BLOB output means this frame goes through the JPEG compressor.
        if (b.format == HAL_PIXEL_FORMAT_BLOB) {
            mNextNeedsJpeg = true;
        }
    }

    camera_metadata_entry_t e;
    res = find_camera_metadata_entry(mRequest,
            ANDROID_REQUEST_FRAME_COUNT,
            &e);
    if (res != NO_ERROR) {
        ALOGE("%s: error reading frame count tag: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        mParent->signalError();
        return false;
    }
    mNextFrameNumber = *e.data.i32;

    res = find_camera_metadata_entry(mRequest,
            ANDROID_SENSOR_EXPOSURE_TIME,
            &e);
    if (res != NO_ERROR) {
        ALOGE("%s: error reading exposure time tag: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        mParent->signalError();
        return false;
    }
    mNextExposureTime = *e.data.i64;

    res = find_camera_metadata_entry(mRequest,
            ANDROID_SENSOR_FRAME_DURATION,
            &e);
    if (res != NO_ERROR) {
        ALOGE("%s: error reading frame duration tag", __FUNCTION__);
        mParent->signalError();
        return false;
    }
    mNextFrameDuration = *e.data.i64;

    // Clamp: frame duration can't be shorter than exposure plus the
    // sensor's minimum vertical blanking interval.
    if (mNextFrameDuration <
            mNextExposureTime + Sensor::kMinVerticalBlank) {
        mNextFrameDuration = mNextExposureTime + Sensor::kMinVerticalBlank;
    }
    res = find_camera_metadata_entry(mRequest,
            ANDROID_SENSOR_SENSITIVITY,
            &e);
    if (res != NO_ERROR) {
        ALOGE("%s: error reading sensitivity tag", __FUNCTION__);
        mParent->signalError();
        return false;
    }
    mNextSensitivity = *e.data.i32;

    // Optional vendor tag: simulated time-of-day for scene lighting.
    res = find_camera_metadata_entry(mRequest,
            EMULATOR_SCENE_HOUROFDAY,
            &e);
    if (res == NO_ERROR) {
        ALOGV("Setting hour: %d", *e.data.i32);
        mParent->mSensor->getScene().setHour(*e.data.i32);
    }

    // Start waiting on readout thread
    mWaitingForReadout = true;
    ALOGV("Configure: Waiting for readout thread");

    return true;
}
884
configureNextCapture()885 bool EmulatedFakeCamera2::ConfigureThread::configureNextCapture() {
886 bool vsync = mParent->mSensor->waitForVSync(kWaitPerLoop);
887 if (!vsync) return true;
888
889 Mutex::Autolock il(mInternalsMutex);
890 ALOGV("Configure: Configuring sensor for capture %d", mNextFrameNumber);
891 mParent->mSensor->setExposureTime(mNextExposureTime);
892 mParent->mSensor->setFrameDuration(mNextFrameDuration);
893 mParent->mSensor->setSensitivity(mNextSensitivity);
894
895 getBuffers();
896
897 ALOGV("Configure: Done configure for capture %d", mNextFrameNumber);
898 mParent->mReadoutThread->setNextOperation(true, mRequest, mNextBuffers);
899 mParent->mSensor->setDestinationBuffers(mNextBuffers);
900
901 mRequest = NULL;
902 mNextBuffers = NULL;
903
904 Mutex::Autolock lock(mInputMutex);
905 mRequestCount--;
906
907 return true;
908 }
909
// Parses a reprocess request: builds the combined buffer list with the
// input (reprocess) streams first — marked by NEGATED stream ids — and
// the output streams after. Only RGB_888 (ZSL) inputs and JPEG outputs
// are supported. Returns false (after signalError) on any violation.
bool EmulatedFakeCamera2::ConfigureThread::setupReprocess() {
    status_t res;

    // Reprocess output is always JPEG here, so the compressor gate in
    // threadLoop() must be taken.
    mNextNeedsJpeg = true;
    mNextIsCapture = false;

    camera_metadata_entry_t reprocessStreams;
    res = find_camera_metadata_entry(mRequest,
            ANDROID_REQUEST_INPUT_STREAMS,
            &reprocessStreams);
    if (res != NO_ERROR) {
        ALOGE("%s: error reading output stream tag", __FUNCTION__);
        mParent->signalError();
        return false;
    }

    mNextBuffers = new Buffers;

    ALOGV("Configure: Setting up input buffers for reprocess");
    for (size_t i = 0; i < reprocessStreams.count; i++) {
        int streamId = reprocessStreams.data.u8[i];
        const ReprocessStream &s = mParent->getReprocessStreamInfo(streamId);
        if (s.format != HAL_PIXEL_FORMAT_RGB_888) {
            ALOGE("%s: Only ZSL reprocessing supported!",
                    __FUNCTION__);
            mParent->signalError();
            return false;
        }
        StreamBuffer b;
        // Negative stream id marks this entry as a reprocess INPUT; see
        // getBuffers(), which acquires rather than dequeues for these.
        b.streamId = -streamId;
        b.width = s.width;
        b.height = s.height;
        b.format = s.format;
        b.stride = s.stride;
        mNextBuffers->push_back(b);
    }

    camera_metadata_entry_t streams;
    res = find_camera_metadata_entry(mRequest,
            ANDROID_REQUEST_OUTPUT_STREAMS,
            &streams);
    if (res != NO_ERROR) {
        ALOGE("%s: error reading output stream tag", __FUNCTION__);
        mParent->signalError();
        return false;
    }

    ALOGV("Configure: Setting up output buffers for reprocess");
    for (size_t i = 0; i < streams.count; i++) {
        int streamId = streams.data.u8[i];
        const Stream &s = mParent->getStreamInfo(streamId);
        if (s.format != HAL_PIXEL_FORMAT_BLOB) {
            // TODO: Support reprocess to YUV
            ALOGE("%s: Non-JPEG output stream %d for reprocess not supported",
                    __FUNCTION__, streamId);
            mParent->signalError();
            return false;
        }
        StreamBuffer b;
        b.streamId = streams.data.u8[i];
        b.width  = s.width;
        b.height = s.height;
        b.format = s.format;
        b.stride = s.stride;
        mNextBuffers->push_back(b);
        ALOGV("Configure:    Buffer %d: Stream %d, %d x %d, format 0x%x, "
                "stride %d",
                i, b.streamId, b.width, b.height, b.format, b.stride);
    }

    camera_metadata_entry_t e;
    res = find_camera_metadata_entry(mRequest,
            ANDROID_REQUEST_FRAME_COUNT,
            &e);
    if (res != NO_ERROR) {
        ALOGE("%s: error reading frame count tag: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        mParent->signalError();
        return false;
    }
    mNextFrameNumber = *e.data.i32;

    return true;
}
994
configureNextReprocess()995 bool EmulatedFakeCamera2::ConfigureThread::configureNextReprocess() {
996 Mutex::Autolock il(mInternalsMutex);
997
998 getBuffers();
999
1000 ALOGV("Configure: Done configure for reprocess %d", mNextFrameNumber);
1001 mParent->mReadoutThread->setNextOperation(false, mRequest, mNextBuffers);
1002
1003 mRequest = NULL;
1004 mNextBuffers = NULL;
1005
1006 Mutex::Autolock lock(mInputMutex);
1007 mRequestCount--;
1008
1009 return true;
1010 }
1011
// Obtains and maps a gralloc buffer for every entry in mNextBuffers.
// Positive stream ids are OUTPUTS: dequeue from the stream and lock for
// CPU write. Negative ids are reprocess INPUTS: acquire from the
// reprocess stream and lock for CPU read. On any failure the current
// buffer is returned to its stream, signalError() is called, and false
// is returned (earlier buffers in the list are NOT unwound here).
bool EmulatedFakeCamera2::ConfigureThread::getBuffers() {
    status_t res;
    /** Get buffers to fill for this frame */
    for (size_t i = 0; i < mNextBuffers->size(); i++) {
        StreamBuffer &b = mNextBuffers->editItemAt(i);

        if (b.streamId > 0) {
            // Output buffer: dequeue and map writable.
            Stream s = mParent->getStreamInfo(b.streamId);
            ALOGV("Configure: Dequeing buffer from stream %d", b.streamId);
            res = s.ops->dequeue_buffer(s.ops, &(b.buffer) );
            if (res != NO_ERROR || b.buffer == NULL) {
                ALOGE("%s: Unable to dequeue buffer from stream %d: %s (%d)",
                        __FUNCTION__, b.streamId, strerror(-res), res);
                mParent->signalError();
                return false;
            }

            /* Lock the buffer from the perspective of the graphics mapper */
            const Rect rect(s.width, s.height);

            res = GraphicBufferMapper::get().lock(*(b.buffer),
                    GRALLOC_USAGE_HW_CAMERA_WRITE,
                    rect, (void**)&(b.img) );

            if (res != NO_ERROR) {
                ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
                        __FUNCTION__, strerror(-res), res);
                // Give the un-mappable buffer back to the stream.
                s.ops->cancel_buffer(s.ops,
                        b.buffer);
                mParent->signalError();
                return false;
            }
        } else {
            // Reprocess input buffer (id was negated in setupReprocess):
            // acquire and map readable.
            ReprocessStream s = mParent->getReprocessStreamInfo(-b.streamId);
            ALOGV("Configure: Acquiring buffer from reprocess stream %d",
                    -b.streamId);
            res = s.ops->acquire_buffer(s.ops, &(b.buffer) );
            if (res != NO_ERROR || b.buffer == NULL) {
                ALOGE("%s: Unable to acquire buffer from reprocess stream %d: "
                        "%s (%d)", __FUNCTION__, -b.streamId,
                        strerror(-res), res);
                mParent->signalError();
                return false;
            }

            /* Lock the buffer from the perspective of the graphics mapper */
            const Rect rect(s.width, s.height);

            res = GraphicBufferMapper::get().lock(*(b.buffer),
                    GRALLOC_USAGE_HW_CAMERA_READ,
                    rect, (void**)&(b.img) );
            if (res != NO_ERROR) {
                ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
                        __FUNCTION__, strerror(-res), res);
                // Give the un-mappable buffer back to the stream.
                s.ops->release_buffer(s.ops,
                        b.buffer);
                mParent->signalError();
                return false;
            }
        }
    }
    return true;
}
1075
ReadoutThread(EmulatedFakeCamera2 * parent)1076 EmulatedFakeCamera2::ReadoutThread::ReadoutThread(EmulatedFakeCamera2 *parent):
1077 Thread(false),
1078 mParent(parent),
1079 mRunning(false),
1080 mActive(false),
1081 mRequestCount(0),
1082 mRequest(NULL),
1083 mBuffers(NULL) {
1084 mInFlightQueue = new InFlightQueue[kInFlightQueueSize];
1085 mInFlightHead = 0;
1086 mInFlightTail = 0;
1087 }
1088
~ReadoutThread()1089 EmulatedFakeCamera2::ReadoutThread::~ReadoutThread() {
1090 delete mInFlightQueue;
1091 }
1092
readyToRun()1093 status_t EmulatedFakeCamera2::ReadoutThread::readyToRun() {
1094 Mutex::Autolock lock(mInputMutex);
1095 ALOGV("Starting up ReadoutThread");
1096 mRunning = true;
1097 mInputSignal.signal();
1098 return NO_ERROR;
1099 }
1100
waitUntilRunning()1101 status_t EmulatedFakeCamera2::ReadoutThread::waitUntilRunning() {
1102 Mutex::Autolock lock(mInputMutex);
1103 if (!mRunning) {
1104 ALOGV("Waiting for readout thread to start");
1105 mInputSignal.wait(mInputMutex);
1106 }
1107 return OK;
1108 }
1109
waitForReady(nsecs_t timeout)1110 bool EmulatedFakeCamera2::ReadoutThread::waitForReady(nsecs_t timeout) {
1111 status_t res;
1112 Mutex::Autolock lock(mInputMutex);
1113 while (!readyForNextCapture()) {
1114 res = mReadySignal.waitRelative(mInputMutex, timeout);
1115 if (res == TIMED_OUT) return false;
1116 if (res != OK) {
1117 ALOGE("%s: Error waiting for ready: %s (%d)", __FUNCTION__,
1118 strerror(-res), res);
1119 return false;
1120 }
1121 }
1122 return true;
1123 }
1124
readyForNextCapture()1125 bool EmulatedFakeCamera2::ReadoutThread::readyForNextCapture() {
1126 return (mInFlightTail + 1) % kInFlightQueueSize != mInFlightHead;
1127 }
1128
setNextOperation(bool isCapture,camera_metadata_t * request,Buffers * buffers)1129 void EmulatedFakeCamera2::ReadoutThread::setNextOperation(
1130 bool isCapture,
1131 camera_metadata_t *request,
1132 Buffers *buffers) {
1133 Mutex::Autolock lock(mInputMutex);
1134 if ( !readyForNextCapture() ) {
1135 ALOGE("In flight queue full, dropping captures");
1136 mParent->signalError();
1137 return;
1138 }
1139 mInFlightQueue[mInFlightTail].isCapture = isCapture;
1140 mInFlightQueue[mInFlightTail].request = request;
1141 mInFlightQueue[mInFlightTail].buffers = buffers;
1142 mInFlightTail = (mInFlightTail + 1) % kInFlightQueueSize;
1143 mRequestCount++;
1144
1145 if (!mActive) {
1146 mActive = true;
1147 mInputSignal.signal();
1148 }
1149 }
1150
isStreamInUse(uint32_t id)1151 bool EmulatedFakeCamera2::ReadoutThread::isStreamInUse(uint32_t id) {
1152 Mutex::Autolock lock(mInputMutex);
1153
1154 size_t i = mInFlightHead;
1155 while (i != mInFlightTail) {
1156 for (size_t j = 0; j < mInFlightQueue[i].buffers->size(); j++) {
1157 if ( (*(mInFlightQueue[i].buffers))[j].streamId == (int)id )
1158 return true;
1159 }
1160 i = (i + 1) % kInFlightQueueSize;
1161 }
1162
1163 Mutex::Autolock iLock(mInternalsMutex);
1164
1165 if (mBuffers != NULL) {
1166 for (i = 0; i < mBuffers->size(); i++) {
1167 if ( (*mBuffers)[i].streamId == (int)id) return true;
1168 }
1169 }
1170
1171 return false;
1172 }
1173
getInProgressCount()1174 int EmulatedFakeCamera2::ReadoutThread::getInProgressCount() {
1175 Mutex::Autolock lock(mInputMutex);
1176
1177 return mRequestCount;
1178 }
1179
/*
 * One iteration of the readout loop:
 *   1. If idle, wait to be signaled; if active with no current request, pop
 *      the next one off the in-flight ring buffer.
 *   2. For captures, wait for the sensor to produce a frame; for reprocess,
 *      take the timestamp from the request metadata.
 *   3. If full metadata was requested, build and enqueue an output frame.
 *   4. Hand each output buffer to its stream, deferring any BLOB (JPEG)
 *      buffer to the JPEG compressor.
 * Returns false only on fatal errors (after signaling the client).
 */
bool EmulatedFakeCamera2::ReadoutThread::threadLoop() {
    static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
    status_t res;
    int32_t frameNumber;

    // Check if we're currently processing or just waiting
    {
        Mutex::Autolock lock(mInputMutex);
        if (!mActive) {
            // Inactive, keep waiting until we've been signaled
            res = mInputSignal.waitRelative(mInputMutex, kWaitPerLoop);
            if (res != NO_ERROR && res != TIMED_OUT) {
                ALOGE("%s: Error waiting for capture requests: %d",
                        __FUNCTION__, res);
                mParent->signalError();
                return false;
            }
            if (!mActive) return true;
        }
        // Active, see if we need a new request
        if (mRequest == NULL) {
            if (mInFlightHead == mInFlightTail) {
                // Go inactive
                ALOGV("Waiting for sensor data");
                mActive = false;
                return true;
            } else {
                // Pop the next request off the ring buffer; signal the
                // configure thread that a queue slot has freed up.
                Mutex::Autolock iLock(mInternalsMutex);
                mReadySignal.signal();
                mIsCapture = mInFlightQueue[mInFlightHead].isCapture;
                mRequest = mInFlightQueue[mInFlightHead].request;
                mBuffers  = mInFlightQueue[mInFlightHead].buffers;
                mInFlightQueue[mInFlightHead].request = NULL;
                mInFlightQueue[mInFlightHead].buffers = NULL;
                mInFlightHead = (mInFlightHead + 1) % kInFlightQueueSize;
                ALOGV("Ready to read out request %p, %d buffers",
                        mRequest, mBuffers->size());
            }
        }
    }

    // Active with request, wait on sensor to complete

    nsecs_t captureTime;

    if (mIsCapture) {
        bool gotFrame;
        gotFrame = mParent->mSensor->waitForNewFrame(kWaitPerLoop,
                &captureTime);

        // No frame yet; retry on the next loop iteration.
        if (!gotFrame) return true;
    }

    Mutex::Autolock iLock(mInternalsMutex);

    camera_metadata_entry_t entry;
    if (!mIsCapture) {
        // Reprocess path: the "capture time" is the original frame's sensor
        // timestamp, carried in the request.
        res = find_camera_metadata_entry(mRequest,
                ANDROID_SENSOR_TIMESTAMP,
            &entry);
        if (res != NO_ERROR) {
            ALOGE("%s: error reading reprocessing timestamp: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            mParent->signalError();
            return false;
        }
        captureTime = entry.data.i64[0];
    }

    res = find_camera_metadata_entry(mRequest,
            ANDROID_REQUEST_FRAME_COUNT,
            &entry);
    if (res != NO_ERROR) {
        ALOGE("%s: error reading frame count tag: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        mParent->signalError();
        return false;
    }
    frameNumber = *entry.data.i32;

    res = find_camera_metadata_entry(mRequest,
            ANDROID_REQUEST_METADATA_MODE,
            &entry);
    if (res != NO_ERROR) {
        ALOGE("%s: error reading metadata mode tag: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        mParent->signalError();
        return false;
    }

    // Got sensor data and request, construct frame and send it out
    ALOGV("Readout: Constructing metadata and frames for request %d",
            frameNumber);

    if (*entry.data.u8 == ANDROID_REQUEST_METADATA_FULL) {
        ALOGV("Readout: Metadata requested, constructing");

        camera_metadata_t *frame = NULL;

        size_t frame_entries = get_camera_metadata_entry_count(mRequest);
        size_t frame_data    = get_camera_metadata_data_count(mRequest);

        // TODO: Dynamically calculate based on enabled statistics, etc
        frame_entries += 10;
        frame_data += 100;

        res = mParent->mFrameQueueDst->dequeue_frame(mParent->mFrameQueueDst,
                frame_entries, frame_data, &frame);

        if (res != NO_ERROR || frame == NULL) {
            ALOGE("%s: Unable to dequeue frame metadata buffer", __FUNCTION__);
            mParent->signalError();
            return false;
        }

        // Start the output frame as a copy of the request's settings.
        res = append_camera_metadata(frame, mRequest);
        if (res != NO_ERROR) {
            ALOGE("Unable to append request metadata");
        }

        if (mIsCapture) {
            add_camera_metadata_entry(frame,
                    ANDROID_SENSOR_TIMESTAMP,
                    &captureTime,
                    1);

            // Report the emulator scene's time-of-day via the vendor tag,
            // updating in place if the request already carried one.
            int32_t hourOfDay = (int32_t)mParent->mSensor->getScene().getHour();
            camera_metadata_entry_t requestedHour;
            res = find_camera_metadata_entry(frame,
                    EMULATOR_SCENE_HOUROFDAY,
                    &requestedHour);
            if (res == NAME_NOT_FOUND) {
                res = add_camera_metadata_entry(frame,
                        EMULATOR_SCENE_HOUROFDAY,
                        &hourOfDay, 1);
                if (res != NO_ERROR) {
                    ALOGE("Unable to add vendor tag");
                }
            } else if (res == OK) {
                *requestedHour.data.i32 = hourOfDay;
            } else {
                ALOGE("%s: Error looking up vendor tag", __FUNCTION__);
            }

            collectStatisticsMetadata(frame);
            // TODO: Collect all final values used from sensor in addition to timestamp
        }

        ALOGV("Readout: Enqueue frame %d", frameNumber);
        mParent->mFrameQueueDst->enqueue_frame(mParent->mFrameQueueDst,
                frame);
    }
    ALOGV("Readout: Free request");
    res = mParent->mRequestQueueSrc->free_request(mParent->mRequestQueueSrc, mRequest);
    if (res != NO_ERROR) {
        ALOGE("%s: Unable to return request buffer to queue: %d",
                __FUNCTION__, res);
        mParent->signalError();
        return false;
    }
    mRequest = NULL;

    int compressedBufferIndex = -1;
    ALOGV("Readout: Processing %d buffers", mBuffers->size());
    for (size_t i = 0; i < mBuffers->size(); i++) {
        const StreamBuffer &b = (*mBuffers)[i];
        ALOGV("Readout: Buffer %d: Stream %d, %d x %d, format 0x%x, stride %d",
                i, b.streamId, b.width, b.height, b.format, b.stride);
        if (b.streamId > 0) {
            if (b.format == HAL_PIXEL_FORMAT_BLOB) {
                // Assumes only one BLOB buffer type per capture
                compressedBufferIndex = i;
            } else {
                ALOGV("Readout: Sending image buffer %d (%p) to output stream %d",
                        i, (void*)*(b.buffer), b.streamId);
                GraphicBufferMapper::get().unlock(*(b.buffer));
                const Stream &s = mParent->getStreamInfo(b.streamId);
                res = s.ops->enqueue_buffer(s.ops, captureTime, b.buffer);
                if (res != OK) {
                    ALOGE("Error enqueuing image buffer %p: %s (%d)", b.buffer,
                            strerror(-res), res);
                    mParent->signalError();
                }
            }
        }
    }

    if (compressedBufferIndex == -1) {
        delete mBuffers;
        mBuffers = NULL;
    } else {
        // JPEG compression runs asynchronously; the compressor takes
        // ownership of the buffer list.
        ALOGV("Readout: Starting JPEG compression for buffer %d, stream %d",
                compressedBufferIndex,
                (*mBuffers)[compressedBufferIndex].streamId);
        mParent->mJpegCompressor->start(mBuffers, captureTime);
        mBuffers = NULL;
    }

    Mutex::Autolock l(mInputMutex);
    mRequestCount--;
    ALOGV("Readout: Done with request %d", frameNumber);
    return true;
}
1383
collectStatisticsMetadata(camera_metadata_t * frame)1384 status_t EmulatedFakeCamera2::ReadoutThread::collectStatisticsMetadata(
1385 camera_metadata_t *frame) {
1386 // Completely fake face rectangles, don't correspond to real faces in scene
1387 ALOGV("Readout: Collecting statistics metadata");
1388
1389 status_t res;
1390 camera_metadata_entry_t entry;
1391 res = find_camera_metadata_entry(frame,
1392 ANDROID_STATS_FACE_DETECT_MODE,
1393 &entry);
1394 if (res != OK) {
1395 ALOGE("%s: Unable to find face detect mode!", __FUNCTION__);
1396 return BAD_VALUE;
1397 }
1398
1399 if (entry.data.u8[0] == ANDROID_STATS_FACE_DETECTION_OFF) return OK;
1400
1401 // The coordinate system for the face regions is the raw sensor pixel
1402 // coordinates. Here, we map from the scene coordinates (0-19 in both axis)
1403 // to raw pixels, for the scene defined in fake-pipeline2/Scene.cpp. We
1404 // approximately place two faces on top of the windows of the house. No
1405 // actual faces exist there, but might one day. Note that this doesn't
1406 // account for the offsets used to account for aspect ratio differences, so
1407 // the rectangles don't line up quite right.
1408 const size_t numFaces = 2;
1409 int32_t rects[numFaces * 4] = {
1410 Sensor::kResolution[0] * 10 / 20,
1411 Sensor::kResolution[1] * 15 / 20,
1412 Sensor::kResolution[0] * 12 / 20,
1413 Sensor::kResolution[1] * 17 / 20,
1414
1415 Sensor::kResolution[0] * 16 / 20,
1416 Sensor::kResolution[1] * 15 / 20,
1417 Sensor::kResolution[0] * 18 / 20,
1418 Sensor::kResolution[1] * 17 / 20
1419 };
1420 // To simulate some kind of real detection going on, we jitter the rectangles on
1421 // each frame by a few pixels in each dimension.
1422 for (size_t i = 0; i < numFaces * 4; i++) {
1423 rects[i] += (int32_t)(((float)rand() / RAND_MAX) * 6 - 3);
1424 }
1425 // The confidence scores (0-100) are similarly jittered.
1426 uint8_t scores[numFaces] = { 85, 95 };
1427 for (size_t i = 0; i < numFaces; i++) {
1428 scores[i] += (int32_t)(((float)rand() / RAND_MAX) * 10 - 5);
1429 }
1430
1431 res = add_camera_metadata_entry(frame, ANDROID_STATS_FACE_RECTANGLES,
1432 rects, numFaces * 4);
1433 if (res != OK) {
1434 ALOGE("%s: Unable to add face rectangles!", __FUNCTION__);
1435 return BAD_VALUE;
1436 }
1437
1438 res = add_camera_metadata_entry(frame, ANDROID_STATS_FACE_SCORES,
1439 scores, numFaces);
1440 if (res != OK) {
1441 ALOGE("%s: Unable to add face scores!", __FUNCTION__);
1442 return BAD_VALUE;
1443 }
1444
1445 if (entry.data.u8[0] == ANDROID_STATS_FACE_DETECTION_SIMPLE) return OK;
1446
1447 // Advanced face detection options - add eye/mouth coordinates. The
1448 // coordinates in order are (leftEyeX, leftEyeY, rightEyeX, rightEyeY,
1449 // mouthX, mouthY). The mapping is the same as the face rectangles.
1450 int32_t features[numFaces * 6] = {
1451 Sensor::kResolution[0] * 10.5 / 20,
1452 Sensor::kResolution[1] * 16 / 20,
1453 Sensor::kResolution[0] * 11.5 / 20,
1454 Sensor::kResolution[1] * 16 / 20,
1455 Sensor::kResolution[0] * 11 / 20,
1456 Sensor::kResolution[1] * 16.5 / 20,
1457
1458 Sensor::kResolution[0] * 16.5 / 20,
1459 Sensor::kResolution[1] * 16 / 20,
1460 Sensor::kResolution[0] * 17.5 / 20,
1461 Sensor::kResolution[1] * 16 / 20,
1462 Sensor::kResolution[0] * 17 / 20,
1463 Sensor::kResolution[1] * 16.5 / 20,
1464 };
1465 // Jitter these a bit less than the rects
1466 for (size_t i = 0; i < numFaces * 6; i++) {
1467 features[i] += (int32_t)(((float)rand() / RAND_MAX) * 4 - 2);
1468 }
1469 // These are unique IDs that are used to identify each face while it's
1470 // visible to the detector (if a face went away and came back, it'd get a
1471 // new ID).
1472 int32_t ids[numFaces] = {
1473 100, 200
1474 };
1475
1476 res = add_camera_metadata_entry(frame, ANDROID_STATS_FACE_LANDMARKS,
1477 features, numFaces * 6);
1478 if (res != OK) {
1479 ALOGE("%s: Unable to add face landmarks!", __FUNCTION__);
1480 return BAD_VALUE;
1481 }
1482
1483 res = add_camera_metadata_entry(frame, ANDROID_STATS_FACE_IDS,
1484 ids, numFaces);
1485 if (res != OK) {
1486 ALOGE("%s: Unable to add face scores!", __FUNCTION__);
1487 return BAD_VALUE;
1488 }
1489
1490 return OK;
1491 }
1492
/*
 * Control thread: simulates the 3A (AF/AE/AWB) state machines for the fake
 * camera. Most state is initialized in readyToRun() when the thread starts.
 */
EmulatedFakeCamera2::ControlThread::ControlThread(EmulatedFakeCamera2 *parent):
        Thread(false),
        mParent(parent) {
    mRunning = false;
}
1498
// No dynamically-allocated state to release.
EmulatedFakeCamera2::ControlThread::~ControlThread() {
}
1501
readyToRun()1502 status_t EmulatedFakeCamera2::ControlThread::readyToRun() {
1503 Mutex::Autolock lock(mInputMutex);
1504
1505 ALOGV("Starting up ControlThread");
1506 mRunning = true;
1507 mStartAf = false;
1508 mCancelAf = false;
1509 mStartPrecapture = false;
1510
1511 mControlMode = ANDROID_CONTROL_AUTO;
1512
1513 mEffectMode = ANDROID_CONTROL_EFFECT_OFF;
1514 mSceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
1515
1516 mAfMode = ANDROID_CONTROL_AF_AUTO;
1517 mAfModeChange = false;
1518
1519 mAeMode = ANDROID_CONTROL_AE_ON;
1520 mAwbMode = ANDROID_CONTROL_AWB_AUTO;
1521
1522 mAfTriggerId = 0;
1523 mPrecaptureTriggerId = 0;
1524
1525 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
1526 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
1527 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
1528
1529 mExposureTime = kNormalExposureTime;
1530
1531 mInputSignal.signal();
1532 return NO_ERROR;
1533 }
1534
waitUntilRunning()1535 status_t EmulatedFakeCamera2::ControlThread::waitUntilRunning() {
1536 Mutex::Autolock lock(mInputMutex);
1537 if (!mRunning) {
1538 ALOGV("Waiting for control thread to start");
1539 mInputSignal.wait(mInputMutex);
1540 }
1541 return OK;
1542 }
1543
processRequest(camera_metadata_t * request)1544 status_t EmulatedFakeCamera2::ControlThread::processRequest(camera_metadata_t *request) {
1545 Mutex::Autolock lock(mInputMutex);
1546 // TODO: Add handling for all android.control.* fields here
1547 camera_metadata_entry_t mode;
1548 status_t res;
1549
1550 res = find_camera_metadata_entry(request,
1551 ANDROID_CONTROL_MODE,
1552 &mode);
1553 mControlMode = mode.data.u8[0];
1554
1555 res = find_camera_metadata_entry(request,
1556 ANDROID_CONTROL_EFFECT_MODE,
1557 &mode);
1558 mEffectMode = mode.data.u8[0];
1559
1560 res = find_camera_metadata_entry(request,
1561 ANDROID_CONTROL_SCENE_MODE,
1562 &mode);
1563 mSceneMode = mode.data.u8[0];
1564
1565 res = find_camera_metadata_entry(request,
1566 ANDROID_CONTROL_AF_MODE,
1567 &mode);
1568 if (mAfMode != mode.data.u8[0]) {
1569 ALOGV("AF new mode: %d, old mode %d", mode.data.u8[0], mAfMode);
1570 mAfMode = mode.data.u8[0];
1571 mAfModeChange = true;
1572 mStartAf = false;
1573 mCancelAf = false;
1574 }
1575
1576 res = find_camera_metadata_entry(request,
1577 ANDROID_CONTROL_AE_MODE,
1578 &mode);
1579 mAeMode = mode.data.u8[0];
1580
1581 res = find_camera_metadata_entry(request,
1582 ANDROID_CONTROL_AE_LOCK,
1583 &mode);
1584 bool aeLock = (mode.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON);
1585 if (mAeLock && !aeLock) {
1586 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
1587 }
1588 mAeLock = aeLock;
1589
1590 res = find_camera_metadata_entry(request,
1591 ANDROID_CONTROL_AWB_MODE,
1592 &mode);
1593 mAwbMode = mode.data.u8[0];
1594
1595 // TODO: Override more control fields
1596
1597 if (mAeMode != ANDROID_CONTROL_AE_OFF) {
1598 camera_metadata_entry_t exposureTime;
1599 res = find_camera_metadata_entry(request,
1600 ANDROID_SENSOR_EXPOSURE_TIME,
1601 &exposureTime);
1602 if (res == OK) {
1603 exposureTime.data.i64[0] = mExposureTime;
1604 }
1605 }
1606
1607 return OK;
1608 }
1609
triggerAction(uint32_t msgType,int32_t ext1,int32_t ext2)1610 status_t EmulatedFakeCamera2::ControlThread::triggerAction(uint32_t msgType,
1611 int32_t ext1, int32_t ext2) {
1612 ALOGV("%s: Triggering %d (%d, %d)", __FUNCTION__, msgType, ext1, ext2);
1613 Mutex::Autolock lock(mInputMutex);
1614 switch (msgType) {
1615 case CAMERA2_TRIGGER_AUTOFOCUS:
1616 mAfTriggerId = ext1;
1617 mStartAf = true;
1618 mCancelAf = false;
1619 break;
1620 case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
1621 mAfTriggerId = ext1;
1622 mStartAf = false;
1623 mCancelAf = true;
1624 break;
1625 case CAMERA2_TRIGGER_PRECAPTURE_METERING:
1626 mPrecaptureTriggerId = ext1;
1627 mStartPrecapture = true;
1628 break;
1629 default:
1630 ALOGE("%s: Unknown action triggered: %d (arguments %d %d)",
1631 __FUNCTION__, msgType, ext1, ext2);
1632 return BAD_VALUE;
1633 }
1634 return OK;
1635 }
1636
1637 const nsecs_t EmulatedFakeCamera2::ControlThread::kControlCycleDelay = 100 * MSEC;
1638 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAfDuration = 500 * MSEC;
1639 const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAfDuration = 900 * MSEC;
1640 const float EmulatedFakeCamera2::ControlThread::kAfSuccessRate = 0.9;
1641 // Once every 5 seconds
1642 const float EmulatedFakeCamera2::ControlThread::kContinuousAfStartRate =
1643 kControlCycleDelay / 5.0 * SEC;
1644 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAeDuration = 500 * MSEC;
1645 const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAeDuration = 2 * SEC;
1646 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinPrecaptureAeDuration = 100 * MSEC;
1647 const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxPrecaptureAeDuration = 400 * MSEC;
1648 // Once every 3 seconds
1649 const float EmulatedFakeCamera2::ControlThread::kAeScanStartRate =
1650 kControlCycleDelay / 3000000000.0;
1651
1652 const nsecs_t EmulatedFakeCamera2::ControlThread::kNormalExposureTime = 10 * MSEC;
1653 const nsecs_t EmulatedFakeCamera2::ControlThread::kExposureJump = 2 * MSEC;
1654 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinExposureTime = 1 * MSEC;
1655
/*
 * One control cycle: snapshot pending triggers and 3A state under the input
 * lock, advance the AF and AE state machines, sleep until the next event
 * (bounded by kControlCycleDelay), then decrement the active scan timers by
 * the time slept.
 */
bool EmulatedFakeCamera2::ControlThread::threadLoop() {
    bool afModeChange = false;
    bool afTriggered = false;
    bool afCancelled = false;
    uint8_t afState;
    uint8_t afMode;
    int32_t afTriggerId;
    bool precaptureTriggered = false;
    uint8_t aeState;
    uint8_t aeMode;
    bool aeLock;
    int32_t precaptureTriggerId;
    nsecs_t nextSleep = kControlCycleDelay;

    // Snapshot and consume pending triggers under the lock; the state
    // machines below run lock-free on the local copies.
    {
        Mutex::Autolock lock(mInputMutex);
        if (mStartAf) {
            ALOGD("Starting AF trigger processing");
            afTriggered = true;
            mStartAf = false;
        } else if (mCancelAf) {
            ALOGD("Starting cancel AF trigger processing");
            afCancelled = true;
            mCancelAf = false;
        }
        afState = mAfState;
        afMode = mAfMode;
        afModeChange = mAfModeChange;
        mAfModeChange = false;

        afTriggerId = mAfTriggerId;

        if(mStartPrecapture) {
            ALOGD("Starting precapture trigger processing");
            precaptureTriggered = true;
            mStartPrecapture = false;
        }
        aeState = mAeState;
        aeMode = mAeMode;
        aeLock = mAeLock;
        precaptureTriggerId = mPrecaptureTriggerId;
    }

    // Cancel or mode change aborts any scan in progress.
    if (afCancelled || afModeChange) {
        ALOGV("Resetting AF state due to cancel/mode change");
        afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
        updateAfState(afState, afTriggerId);
        mAfScanDuration = 0;
        mLockAfterPassiveScan = false;
    }

    uint8_t oldAfState = afState;

    if (afTriggered) {
        afState = processAfTrigger(afMode, afState);
    }

    afState = maybeStartAfScan(afMode, afState);
    afState = updateAfScan(afMode, afState, &nextSleep);
    updateAfState(afState, afTriggerId);

    if (precaptureTriggered) {
        aeState = processPrecaptureTrigger(aeMode, aeState);
    }

    aeState = maybeStartAeScan(aeMode, aeLock, aeState);
    aeState = updateAeScan(aeMode, aeLock, aeState, &nextSleep);
    updateAeState(aeState, precaptureTriggerId);

    // Sleep for the remainder of the cycle; on interruption, nanosleep
    // writes the remaining time back into t and we retry.
    // NOTE(review): any non-zero return (e.g. EINVAL) loops forever here —
    // presumably only EINTR occurs in practice; confirm.
    int ret;
    timespec t;
    t.tv_sec = 0;
    t.tv_nsec = nextSleep;
    do {
        ret = nanosleep(&t, &t);
    } while (ret != 0);

    // Charge the time slept against the running scans.
    if (mAfScanDuration > 0) {
        mAfScanDuration -= nextSleep;
    }
    if (mAeScanDuration > 0) {
        mAeScanDuration -= nextSleep;
    }

    return true;
}
1742
/*
 * Applies an autofocus trigger to the current AF state for the given AF mode,
 * returning the new state. AUTO/MACRO start a new active scan; the continuous
 * modes lock immediately (or, for PICTURE, after the passive scan finishes).
 */
int EmulatedFakeCamera2::ControlThread::processAfTrigger(uint8_t afMode,
        uint8_t afState) {
    switch (afMode) {
        case ANDROID_CONTROL_AF_OFF:
        case ANDROID_CONTROL_AF_EDOF:
            // Do nothing
            break;
        case ANDROID_CONTROL_AF_MACRO:
        case ANDROID_CONTROL_AF_AUTO:
            switch (afState) {
                case ANDROID_CONTROL_AF_STATE_INACTIVE:
                case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
                case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
                    // Start new focusing cycle
                    // Scan takes a random duration in
                    // [kMinAfDuration, kMaxAfDuration].
                    mAfScanDuration =  ((double)rand() / RAND_MAX) *
                        (kMaxAfDuration - kMinAfDuration) + kMinAfDuration;
                    afState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                    ALOGV("%s: AF scan start, duration %lld ms",
                          __FUNCTION__, mAfScanDuration / 1000000);
                    break;
                case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
                    // Ignore new request, already scanning
                    break;
                default:
                    ALOGE("Unexpected AF state in AUTO/MACRO AF mode: %d",
                          afState);
            }
            break;
        case ANDROID_CONTROL_AF_CONTINUOUS_PICTURE:
            switch (afState) {
                // Picture mode waits for passive scan to complete
                case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
                    // Defer locking until updateAfScan() finishes the scan.
                    mLockAfterPassiveScan = true;
                    break;
                case ANDROID_CONTROL_AF_STATE_INACTIVE:
                    afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                    break;
                case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
                    afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                    break;
                case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
                case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
                    // Must cancel to get out of these states
                    break;
                default:
                    ALOGE("Unexpected AF state in CONTINUOUS_PICTURE AF mode: %d",
                          afState);
            }
            break;
        case ANDROID_CONTROL_AF_CONTINUOUS_VIDEO:
            switch (afState) {
                // Video mode does not wait for passive scan to complete
                case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
                case ANDROID_CONTROL_AF_STATE_INACTIVE:
                    afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                    break;
                case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
                    afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                    break;
                case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
                case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
                    // Must cancel to get out of these states
                    break;
                default:
                    ALOGE("Unexpected AF state in CONTINUOUS_VIDEO AF mode: %d",
                          afState);
            }
            break;
        default:
            break;
    }
    return afState;
}
1816
maybeStartAfScan(uint8_t afMode,uint8_t afState)1817 int EmulatedFakeCamera2::ControlThread::maybeStartAfScan(uint8_t afMode,
1818 uint8_t afState) {
1819 if ((afMode == ANDROID_CONTROL_AF_CONTINUOUS_VIDEO ||
1820 afMode == ANDROID_CONTROL_AF_CONTINUOUS_PICTURE) &&
1821 (afState == ANDROID_CONTROL_AF_STATE_INACTIVE ||
1822 afState == ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED)) {
1823
1824 bool startScan = ((double)rand() / RAND_MAX) < kContinuousAfStartRate;
1825 if (startScan) {
1826 // Start new passive focusing cycle
1827 mAfScanDuration = ((double)rand() / RAND_MAX) *
1828 (kMaxAfDuration - kMinAfDuration) + kMinAfDuration;
1829 afState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
1830 ALOGV("%s: AF passive scan start, duration %lld ms",
1831 __FUNCTION__, mAfScanDuration / 1000000);
1832 }
1833 }
1834 return afState;
1835 }
1836
/*
 * Advances a running AF scan. When the scan timer (mAfScanDuration) has
 * expired, transitions to the mode-appropriate terminal state; otherwise
 * shortens *maxSleep so the control loop wakes up when the scan ends.
 */
int EmulatedFakeCamera2::ControlThread::updateAfScan(uint8_t afMode,
        uint8_t afState, nsecs_t *maxSleep) {
    // Nothing to do unless a scan is in progress.
    if (! (afState == ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN ||
            afState == ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN ) ) {
        return afState;
    }

    if (mAfScanDuration <= 0) {
        ALOGV("%s: AF scan done", __FUNCTION__);
        switch (afMode) {
            case ANDROID_CONTROL_AF_MACRO:
            case ANDROID_CONTROL_AF_AUTO: {
                // Active scans succeed with probability kAfSuccessRate.
                bool success = ((double)rand() / RAND_MAX) < kAfSuccessRate;
                if (success) {
                    afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
                break;
            }
            case ANDROID_CONTROL_AF_CONTINUOUS_PICTURE:
                // A trigger received mid-scan (processAfTrigger) requested a
                // lock once this passive scan completed.
                if (mLockAfterPassiveScan) {
                    afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                    mLockAfterPassiveScan = false;
                } else {
                    afState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
                }
                break;
            case ANDROID_CONTROL_AF_CONTINUOUS_VIDEO:
                afState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
                break;
            default:
                ALOGE("Unexpected AF mode in scan state");
        }
    } else {
        // Scan still running: wake up no later than its expected end.
        if (mAfScanDuration <= *maxSleep) {
            *maxSleep = mAfScanDuration;
        }
    }
    return afState;
}
1878
updateAfState(uint8_t newState,int32_t triggerId)1879 void EmulatedFakeCamera2::ControlThread::updateAfState(uint8_t newState,
1880 int32_t triggerId) {
1881 Mutex::Autolock lock(mInputMutex);
1882 if (mAfState != newState) {
1883 ALOGV("%s: Autofocus state now %d, id %d", __FUNCTION__,
1884 newState, triggerId);
1885 mAfState = newState;
1886 mParent->sendNotification(CAMERA2_MSG_AUTOFOCUS,
1887 newState, triggerId, 0);
1888 }
1889 }
1890
/*
 * Handles a precapture metering trigger: in any AE-on mode, starts a
 * precapture metering sequence with a random duration; in AE-off mode the
 * trigger is ignored. Returns the new AE state.
 */
int EmulatedFakeCamera2::ControlThread::processPrecaptureTrigger(uint8_t aeMode,
        uint8_t aeState) {
    switch (aeMode) {
        case ANDROID_CONTROL_AE_OFF:
            // Don't do anything for these
            return aeState;
        case ANDROID_CONTROL_AE_ON:
        case ANDROID_CONTROL_AE_ON_AUTO_FLASH:
        case ANDROID_CONTROL_AE_ON_ALWAYS_FLASH:
        case ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE:
            // Trigger a precapture cycle
            // All AE-on variants share this handling (intentional grouping).
            aeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
            mAeScanDuration = ((double)rand() / RAND_MAX) *
                    (kMaxPrecaptureAeDuration - kMinPrecaptureAeDuration) +
                    kMinPrecaptureAeDuration;
            ALOGD("%s: AE precapture scan start, duration %lld ms",
                    __FUNCTION__, mAeScanDuration / 1000000);

    }
    return aeState;
}
1912
/*
 * In any AE-on mode with AE unlocked, randomly starts a new exposure
 * metering scan when AE is at rest (inactive or converged). Returns the
 * possibly updated AE state.
 */
int EmulatedFakeCamera2::ControlThread::maybeStartAeScan(uint8_t aeMode,
        bool aeLocked,
        uint8_t aeState) {
    // A locked AE never starts scanning.
    if (aeLocked) return aeState;
    switch (aeMode) {
        case ANDROID_CONTROL_AE_OFF:
            break;
        case ANDROID_CONTROL_AE_ON:
        case ANDROID_CONTROL_AE_ON_AUTO_FLASH:
        case ANDROID_CONTROL_AE_ON_ALWAYS_FLASH:
        case ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE: {
            // Only start from a resting state.
            if (aeState != ANDROID_CONTROL_AE_STATE_INACTIVE &&
                    aeState != ANDROID_CONTROL_AE_STATE_CONVERGED) break;

            // Decide per control cycle whether to start a scan.
            bool startScan = ((double)rand() / RAND_MAX) < kAeScanStartRate;
            if (startScan) {
                // Scan takes a random duration in
                // [kMinAeDuration, kMaxAeDuration].
                mAeScanDuration = ((double)rand() / RAND_MAX) *
                        (kMaxAeDuration - kMinAeDuration) + kMinAeDuration;
                aeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
                ALOGD("%s: AE scan start, duration %lld ms",
                        __FUNCTION__, mAeScanDuration / 1000000);
            }
        }
    }

    return aeState;
}
1940
/*
 * Advances a running AE scan. Locking AE aborts a search (but not a
 * precapture sequence). While scanning, the exposure time is jittered each
 * cycle; when the scan timer expires, exposure snaps back to the nominal
 * value and the state converges (or locks).
 */
int EmulatedFakeCamera2::ControlThread::updateAeScan(uint8_t aeMode,
        bool aeLock, uint8_t aeState, nsecs_t *maxSleep) {
    if (aeLock && aeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
        // Lock request cancels any in-progress search immediately.
        mAeScanDuration = 0;
        aeState = ANDROID_CONTROL_AE_STATE_LOCKED;
    } else if ((aeState == ANDROID_CONTROL_AE_STATE_SEARCHING) ||
            (aeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE ) ) {
        if (mAeScanDuration <= 0) {
            ALOGD("%s: AE scan done", __FUNCTION__);
            aeState = aeLock ?
                    ANDROID_CONTROL_AE_STATE_LOCKED :ANDROID_CONTROL_AE_STATE_CONVERGED;

            // Settle on the nominal exposure once the scan completes.
            Mutex::Autolock lock(mInputMutex);
            mExposureTime = kNormalExposureTime;
        } else {
            // Scan still running: wake up no later than its expected end.
            if (mAeScanDuration <= *maxSleep) {
                *maxSleep = mAeScanDuration;
            }

            // Random walk of the exposure time within +/- kExposureJump,
            // clamped below by kMinExposureTime.
            int64_t exposureDelta =
                    ((double)rand() / RAND_MAX) * 2 * kExposureJump -
                    kExposureJump;
            Mutex::Autolock lock(mInputMutex);
            mExposureTime = mExposureTime + exposureDelta;
            if (mExposureTime < kMinExposureTime) mExposureTime = kMinExposureTime;
        }
    }

    return aeState;
}
1971
1972
updateAeState(uint8_t newState,int32_t triggerId)1973 void EmulatedFakeCamera2::ControlThread::updateAeState(uint8_t newState,
1974 int32_t triggerId) {
1975 Mutex::Autolock lock(mInputMutex);
1976 if (mAeState != newState) {
1977 ALOGD("%s: Autoexposure state now %d, id %d", __FUNCTION__,
1978 newState, triggerId);
1979 mAeState = newState;
1980 mParent->sendNotification(CAMERA2_MSG_AUTOEXPOSURE,
1981 newState, triggerId, 0);
1982 }
1983 }
1984
1985 /** Private methods */
1986
constructStaticInfo(camera_metadata_t ** info,bool sizeRequest) const1987 status_t EmulatedFakeCamera2::constructStaticInfo(
1988 camera_metadata_t **info,
1989 bool sizeRequest) const {
1990
1991 size_t entryCount = 0;
1992 size_t dataCount = 0;
1993 status_t ret;
1994
1995 #define ADD_OR_SIZE( tag, data, count ) \
1996 if ( ( ret = addOrSize(*info, sizeRequest, &entryCount, &dataCount, \
1997 tag, data, count) ) != OK ) return ret
1998
1999 // android.lens
2000
2001 // 5 cm min focus distance for back camera, infinity (fixed focus) for front
2002 const float minFocusDistance = mFacingBack ? 1.0/0.05 : 0.0;
2003 ADD_OR_SIZE(ANDROID_LENS_MINIMUM_FOCUS_DISTANCE,
2004 &minFocusDistance, 1);
2005 // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
2006 const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0;
2007 ADD_OR_SIZE(ANDROID_LENS_HYPERFOCAL_DISTANCE,
2008 &minFocusDistance, 1);
2009
2010 static const float focalLength = 3.30f; // mm
2011 ADD_OR_SIZE(ANDROID_LENS_AVAILABLE_FOCAL_LENGTHS,
2012 &focalLength, 1);
2013 static const float aperture = 2.8f;
2014 ADD_OR_SIZE(ANDROID_LENS_AVAILABLE_APERTURES,
2015 &aperture, 1);
2016 static const float filterDensity = 0;
2017 ADD_OR_SIZE(ANDROID_LENS_AVAILABLE_FILTER_DENSITY,
2018 &filterDensity, 1);
2019 static const uint8_t availableOpticalStabilization =
2020 ANDROID_LENS_OPTICAL_STABILIZATION_OFF;
2021 ADD_OR_SIZE(ANDROID_LENS_AVAILABLE_OPTICAL_STABILIZATION,
2022 &availableOpticalStabilization, 1);
2023
2024 static const int32_t lensShadingMapSize[] = {1, 1};
2025 ADD_OR_SIZE(ANDROID_LENS_SHADING_MAP_SIZE, lensShadingMapSize,
2026 sizeof(lensShadingMapSize)/sizeof(int32_t));
2027
2028 static const float lensShadingMap[3 * 1 * 1 ] =
2029 { 1.f, 1.f, 1.f };
2030 ADD_OR_SIZE(ANDROID_LENS_SHADING_MAP, lensShadingMap,
2031 sizeof(lensShadingMap)/sizeof(float));
2032
2033 // Identity transform
2034 static const int32_t geometricCorrectionMapSize[] = {2, 2};
2035 ADD_OR_SIZE(ANDROID_LENS_GEOMETRIC_CORRECTION_MAP_SIZE,
2036 geometricCorrectionMapSize,
2037 sizeof(geometricCorrectionMapSize)/sizeof(int32_t));
2038
2039 static const float geometricCorrectionMap[2 * 3 * 2 * 2] = {
2040 0.f, 0.f, 0.f, 0.f, 0.f, 0.f,
2041 1.f, 0.f, 1.f, 0.f, 1.f, 0.f,
2042 0.f, 1.f, 0.f, 1.f, 0.f, 1.f,
2043 1.f, 1.f, 1.f, 1.f, 1.f, 1.f};
2044 ADD_OR_SIZE(ANDROID_LENS_GEOMETRIC_CORRECTION_MAP,
2045 geometricCorrectionMap,
2046 sizeof(geometricCorrectionMap)/sizeof(float));
2047
2048 int32_t lensFacing = mFacingBack ?
2049 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
2050 ADD_OR_SIZE(ANDROID_LENS_FACING, &lensFacing, 1);
2051
2052 float lensPosition[3];
2053 if (mFacingBack) {
2054 // Back-facing camera is center-top on device
2055 lensPosition[0] = 0;
2056 lensPosition[1] = 20;
2057 lensPosition[2] = -5;
2058 } else {
2059 // Front-facing camera is center-right on device
2060 lensPosition[0] = 20;
2061 lensPosition[1] = 20;
2062 lensPosition[2] = 0;
2063 }
2064 ADD_OR_SIZE(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/
2065 sizeof(float));
2066
2067 // android.sensor
2068
2069 ADD_OR_SIZE(ANDROID_SENSOR_EXPOSURE_TIME_RANGE,
2070 Sensor::kExposureTimeRange, 2);
2071
2072 ADD_OR_SIZE(ANDROID_SENSOR_MAX_FRAME_DURATION,
2073 &Sensor::kFrameDurationRange[1], 1);
2074
2075 ADD_OR_SIZE(ANDROID_SENSOR_AVAILABLE_SENSITIVITIES,
2076 Sensor::kAvailableSensitivities,
2077 sizeof(Sensor::kAvailableSensitivities)
2078 /sizeof(uint32_t));
2079
2080 ADD_OR_SIZE(ANDROID_SENSOR_COLOR_FILTER_ARRANGEMENT,
2081 &Sensor::kColorFilterArrangement, 1);
2082
2083 static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
2084 ADD_OR_SIZE(ANDROID_SENSOR_PHYSICAL_SIZE,
2085 sensorPhysicalSize, 2);
2086
2087 ADD_OR_SIZE(ANDROID_SENSOR_PIXEL_ARRAY_SIZE,
2088 Sensor::kResolution, 2);
2089
2090 ADD_OR_SIZE(ANDROID_SENSOR_ACTIVE_ARRAY_SIZE,
2091 Sensor::kResolution, 2);
2092
2093 ADD_OR_SIZE(ANDROID_SENSOR_WHITE_LEVEL,
2094 &Sensor::kMaxRawValue, 1);
2095
2096 static const int32_t blackLevelPattern[4] = {
2097 Sensor::kBlackLevel, Sensor::kBlackLevel,
2098 Sensor::kBlackLevel, Sensor::kBlackLevel
2099 };
2100 ADD_OR_SIZE(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
2101 blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
2102
2103 //TODO: sensor color calibration fields
2104
2105 // android.flash
2106 static const uint8_t flashAvailable = 0;
2107 ADD_OR_SIZE(ANDROID_FLASH_AVAILABLE, &flashAvailable, 1);
2108
2109 static const int64_t flashChargeDuration = 0;
2110 ADD_OR_SIZE(ANDROID_FLASH_CHARGE_DURATION, &flashChargeDuration, 1);
2111
2112 // android.tonemap
2113
2114 static const int32_t tonemapCurvePoints = 128;
2115 ADD_OR_SIZE(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
2116
2117 // android.scaler
2118
2119 ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_FORMATS,
2120 kAvailableFormats,
2121 sizeof(kAvailableFormats)/sizeof(uint32_t));
2122
2123 ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
2124 kAvailableRawSizes,
2125 sizeof(kAvailableRawSizes)/sizeof(uint32_t));
2126
2127 ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
2128 kAvailableRawMinDurations,
2129 sizeof(kAvailableRawMinDurations)/sizeof(uint64_t));
2130
2131 if (mFacingBack) {
2132 ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2133 kAvailableProcessedSizesBack,
2134 sizeof(kAvailableProcessedSizesBack)/sizeof(uint32_t));
2135 } else {
2136 ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
2137 kAvailableProcessedSizesFront,
2138 sizeof(kAvailableProcessedSizesFront)/sizeof(uint32_t));
2139 }
2140
2141 ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
2142 kAvailableProcessedMinDurations,
2143 sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t));
2144
2145 if (mFacingBack) {
2146 ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2147 kAvailableJpegSizesBack,
2148 sizeof(kAvailableJpegSizesBack)/sizeof(uint32_t));
2149 } else {
2150 ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
2151 kAvailableJpegSizesFront,
2152 sizeof(kAvailableJpegSizesFront)/sizeof(uint32_t));
2153 }
2154
2155 ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
2156 kAvailableJpegMinDurations,
2157 sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t));
2158
2159 static const float maxZoom = 10;
2160 ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_MAX_ZOOM,
2161 &maxZoom, 1);
2162
2163 // android.jpeg
2164
2165 static const int32_t jpegThumbnailSizes[] = {
2166 0, 0,
2167 160, 120,
2168 320, 240
2169 };
2170 ADD_OR_SIZE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
2171 jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
2172
2173 static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
2174 ADD_OR_SIZE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
2175
2176 // android.stats
2177
2178 static const uint8_t availableFaceDetectModes[] = {
2179 ANDROID_STATS_FACE_DETECTION_OFF,
2180 ANDROID_STATS_FACE_DETECTION_SIMPLE,
2181 ANDROID_STATS_FACE_DETECTION_FULL
2182 };
2183
2184 ADD_OR_SIZE(ANDROID_STATS_AVAILABLE_FACE_DETECT_MODES,
2185 availableFaceDetectModes,
2186 sizeof(availableFaceDetectModes));
2187
2188 static const int32_t maxFaceCount = 8;
2189 ADD_OR_SIZE(ANDROID_STATS_MAX_FACE_COUNT,
2190 &maxFaceCount, 1);
2191
2192 static const int32_t histogramSize = 64;
2193 ADD_OR_SIZE(ANDROID_STATS_HISTOGRAM_BUCKET_COUNT,
2194 &histogramSize, 1);
2195
2196 static const int32_t maxHistogramCount = 1000;
2197 ADD_OR_SIZE(ANDROID_STATS_MAX_HISTOGRAM_COUNT,
2198 &maxHistogramCount, 1);
2199
2200 static const int32_t sharpnessMapSize[2] = {64, 64};
2201 ADD_OR_SIZE(ANDROID_STATS_SHARPNESS_MAP_SIZE,
2202 sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t));
2203
2204 static const int32_t maxSharpnessMapValue = 1000;
2205 ADD_OR_SIZE(ANDROID_STATS_MAX_SHARPNESS_MAP_VALUE,
2206 &maxSharpnessMapValue, 1);
2207
2208 // android.control
2209
2210 static const uint8_t availableSceneModes[] = {
2211 ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED
2212 };
2213 ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
2214 availableSceneModes, sizeof(availableSceneModes));
2215
2216 static const uint8_t availableEffects[] = {
2217 ANDROID_CONTROL_EFFECT_OFF
2218 };
2219 ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2220 availableEffects, sizeof(availableEffects));
2221
2222 int32_t max3aRegions = 0;
2223 ADD_OR_SIZE(ANDROID_CONTROL_MAX_REGIONS,
2224 &max3aRegions, 1);
2225
2226 static const uint8_t availableAeModes[] = {
2227 ANDROID_CONTROL_AE_OFF,
2228 ANDROID_CONTROL_AE_ON
2229 };
2230 ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2231 availableAeModes, sizeof(availableAeModes));
2232
2233 static const camera_metadata_rational exposureCompensationStep = {
2234 1, 3
2235 };
2236 ADD_OR_SIZE(ANDROID_CONTROL_AE_EXP_COMPENSATION_STEP,
2237 &exposureCompensationStep, 1);
2238
2239 int32_t exposureCompensationRange[] = {-9, 9};
2240 ADD_OR_SIZE(ANDROID_CONTROL_AE_EXP_COMPENSATION_RANGE,
2241 exposureCompensationRange,
2242 sizeof(exposureCompensationRange)/sizeof(int32_t));
2243
2244 static const int32_t availableTargetFpsRanges[] = {
2245 5, 30, 15, 30
2246 };
2247 ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2248 availableTargetFpsRanges,
2249 sizeof(availableTargetFpsRanges)/sizeof(int32_t));
2250
2251 static const uint8_t availableAntibandingModes[] = {
2252 ANDROID_CONTROL_AE_ANTIBANDING_OFF,
2253 ANDROID_CONTROL_AE_ANTIBANDING_AUTO
2254 };
2255 ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2256 availableAntibandingModes, sizeof(availableAntibandingModes));
2257
2258 static const uint8_t availableAwbModes[] = {
2259 ANDROID_CONTROL_AWB_OFF,
2260 ANDROID_CONTROL_AWB_AUTO,
2261 ANDROID_CONTROL_AWB_INCANDESCENT,
2262 ANDROID_CONTROL_AWB_FLUORESCENT,
2263 ANDROID_CONTROL_AWB_DAYLIGHT,
2264 ANDROID_CONTROL_AWB_SHADE
2265 };
2266 ADD_OR_SIZE(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2267 availableAwbModes, sizeof(availableAwbModes));
2268
2269 static const uint8_t availableAfModesBack[] = {
2270 ANDROID_CONTROL_AF_OFF,
2271 ANDROID_CONTROL_AF_AUTO,
2272 ANDROID_CONTROL_AF_MACRO,
2273 ANDROID_CONTROL_AF_CONTINUOUS_VIDEO,
2274 ANDROID_CONTROL_AF_CONTINUOUS_PICTURE
2275 };
2276
2277 static const uint8_t availableAfModesFront[] = {
2278 ANDROID_CONTROL_AF_OFF
2279 };
2280
2281 if (mFacingBack) {
2282 ADD_OR_SIZE(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2283 availableAfModesBack, sizeof(availableAfModesBack));
2284 } else {
2285 ADD_OR_SIZE(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2286 availableAfModesFront, sizeof(availableAfModesFront));
2287 }
2288
2289 static const uint8_t availableVstabModes[] = {
2290 ANDROID_CONTROL_VIDEO_STABILIZATION_OFF
2291 };
2292 ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2293 availableVstabModes, sizeof(availableVstabModes));
2294
2295 #undef ADD_OR_SIZE
2296 /** Allocate metadata if sizing */
2297 if (sizeRequest) {
2298 ALOGV("Allocating %d entries, %d extra bytes for "
2299 "static camera info",
2300 entryCount, dataCount);
2301 *info = allocate_camera_metadata(entryCount, dataCount);
2302 if (*info == NULL) {
2303 ALOGE("Unable to allocate camera static info"
2304 "(%d entries, %d bytes extra data)",
2305 entryCount, dataCount);
2306 return NO_MEMORY;
2307 }
2308 }
2309 return OK;
2310 }
2311
constructDefaultRequest(int request_template,camera_metadata_t ** request,bool sizeRequest) const2312 status_t EmulatedFakeCamera2::constructDefaultRequest(
2313 int request_template,
2314 camera_metadata_t **request,
2315 bool sizeRequest) const {
2316
2317 size_t entryCount = 0;
2318 size_t dataCount = 0;
2319 status_t ret;
2320
2321 #define ADD_OR_SIZE( tag, data, count ) \
2322 if ( ( ret = addOrSize(*request, sizeRequest, &entryCount, &dataCount, \
2323 tag, data, count) ) != OK ) return ret
2324
2325 /** android.request */
2326
2327 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
2328 ADD_OR_SIZE(ANDROID_REQUEST_TYPE, &requestType, 1);
2329
2330 static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL;
2331 ADD_OR_SIZE(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
2332
2333 static const int32_t id = 0;
2334 ADD_OR_SIZE(ANDROID_REQUEST_ID, &id, 1);
2335
2336 static const int32_t frameCount = 0;
2337 ADD_OR_SIZE(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
2338
2339 // OUTPUT_STREAMS set by user
2340 entryCount += 1;
2341 dataCount += 5; // TODO: Should be maximum stream number
2342
2343 /** android.lens */
2344
2345 static const float focusDistance = 0;
2346 ADD_OR_SIZE(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
2347
2348 static const float aperture = 2.8f;
2349 ADD_OR_SIZE(ANDROID_LENS_APERTURE, &aperture, 1);
2350
2351 static const float focalLength = 5.0f;
2352 ADD_OR_SIZE(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
2353
2354 static const float filterDensity = 0;
2355 ADD_OR_SIZE(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
2356
2357 static const uint8_t opticalStabilizationMode =
2358 ANDROID_LENS_OPTICAL_STABILIZATION_OFF;
2359 ADD_OR_SIZE(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
2360 &opticalStabilizationMode, 1);
2361
2362 // FOCUS_RANGE set only in frame
2363
2364 /** android.sensor */
2365
2366 static const int64_t exposureTime = 10 * MSEC;
2367 ADD_OR_SIZE(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
2368
2369 static const int64_t frameDuration = 33333333L; // 1/30 s
2370 ADD_OR_SIZE(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
2371
2372 static const int32_t sensitivity = 100;
2373 ADD_OR_SIZE(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
2374
2375 // TIMESTAMP set only in frame
2376
2377 /** android.flash */
2378
2379 static const uint8_t flashMode = ANDROID_FLASH_OFF;
2380 ADD_OR_SIZE(ANDROID_FLASH_MODE, &flashMode, 1);
2381
2382 static const uint8_t flashPower = 10;
2383 ADD_OR_SIZE(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
2384
2385 static const int64_t firingTime = 0;
2386 ADD_OR_SIZE(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
2387
2388 /** Processing block modes */
2389 uint8_t hotPixelMode = 0;
2390 uint8_t demosaicMode = 0;
2391 uint8_t noiseMode = 0;
2392 uint8_t shadingMode = 0;
2393 uint8_t geometricMode = 0;
2394 uint8_t colorMode = 0;
2395 uint8_t tonemapMode = 0;
2396 uint8_t edgeMode = 0;
2397 switch (request_template) {
2398 case CAMERA2_TEMPLATE_PREVIEW:
2399 hotPixelMode = ANDROID_PROCESSING_FAST;
2400 demosaicMode = ANDROID_PROCESSING_FAST;
2401 noiseMode = ANDROID_PROCESSING_FAST;
2402 shadingMode = ANDROID_PROCESSING_FAST;
2403 geometricMode = ANDROID_PROCESSING_FAST;
2404 colorMode = ANDROID_PROCESSING_FAST;
2405 tonemapMode = ANDROID_PROCESSING_FAST;
2406 edgeMode = ANDROID_PROCESSING_FAST;
2407 break;
2408 case CAMERA2_TEMPLATE_STILL_CAPTURE:
2409 hotPixelMode = ANDROID_PROCESSING_HIGH_QUALITY;
2410 demosaicMode = ANDROID_PROCESSING_HIGH_QUALITY;
2411 noiseMode = ANDROID_PROCESSING_HIGH_QUALITY;
2412 shadingMode = ANDROID_PROCESSING_HIGH_QUALITY;
2413 geometricMode = ANDROID_PROCESSING_HIGH_QUALITY;
2414 colorMode = ANDROID_PROCESSING_HIGH_QUALITY;
2415 tonemapMode = ANDROID_PROCESSING_HIGH_QUALITY;
2416 edgeMode = ANDROID_PROCESSING_HIGH_QUALITY;
2417 break;
2418 case CAMERA2_TEMPLATE_VIDEO_RECORD:
2419 hotPixelMode = ANDROID_PROCESSING_FAST;
2420 demosaicMode = ANDROID_PROCESSING_FAST;
2421 noiseMode = ANDROID_PROCESSING_FAST;
2422 shadingMode = ANDROID_PROCESSING_FAST;
2423 geometricMode = ANDROID_PROCESSING_FAST;
2424 colorMode = ANDROID_PROCESSING_FAST;
2425 tonemapMode = ANDROID_PROCESSING_FAST;
2426 edgeMode = ANDROID_PROCESSING_FAST;
2427 break;
2428 case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
2429 hotPixelMode = ANDROID_PROCESSING_HIGH_QUALITY;
2430 demosaicMode = ANDROID_PROCESSING_HIGH_QUALITY;
2431 noiseMode = ANDROID_PROCESSING_HIGH_QUALITY;
2432 shadingMode = ANDROID_PROCESSING_HIGH_QUALITY;
2433 geometricMode = ANDROID_PROCESSING_HIGH_QUALITY;
2434 colorMode = ANDROID_PROCESSING_HIGH_QUALITY;
2435 tonemapMode = ANDROID_PROCESSING_HIGH_QUALITY;
2436 edgeMode = ANDROID_PROCESSING_HIGH_QUALITY;
2437 break;
2438 case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
2439 hotPixelMode = ANDROID_PROCESSING_HIGH_QUALITY;
2440 demosaicMode = ANDROID_PROCESSING_HIGH_QUALITY;
2441 noiseMode = ANDROID_PROCESSING_HIGH_QUALITY;
2442 shadingMode = ANDROID_PROCESSING_HIGH_QUALITY;
2443 geometricMode = ANDROID_PROCESSING_HIGH_QUALITY;
2444 colorMode = ANDROID_PROCESSING_HIGH_QUALITY;
2445 tonemapMode = ANDROID_PROCESSING_HIGH_QUALITY;
2446 edgeMode = ANDROID_PROCESSING_HIGH_QUALITY;
2447 break;
2448 default:
2449 hotPixelMode = ANDROID_PROCESSING_FAST;
2450 demosaicMode = ANDROID_PROCESSING_FAST;
2451 noiseMode = ANDROID_PROCESSING_FAST;
2452 shadingMode = ANDROID_PROCESSING_FAST;
2453 geometricMode = ANDROID_PROCESSING_FAST;
2454 colorMode = ANDROID_PROCESSING_FAST;
2455 tonemapMode = ANDROID_PROCESSING_FAST;
2456 edgeMode = ANDROID_PROCESSING_FAST;
2457 break;
2458 }
2459 ADD_OR_SIZE(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
2460 ADD_OR_SIZE(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
2461 ADD_OR_SIZE(ANDROID_NOISE_MODE, &noiseMode, 1);
2462 ADD_OR_SIZE(ANDROID_SHADING_MODE, &shadingMode, 1);
2463 ADD_OR_SIZE(ANDROID_GEOMETRIC_MODE, &geometricMode, 1);
2464 ADD_OR_SIZE(ANDROID_COLOR_MODE, &colorMode, 1);
2465 ADD_OR_SIZE(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
2466 ADD_OR_SIZE(ANDROID_EDGE_MODE, &edgeMode, 1);
2467
2468 /** android.noise */
2469 static const uint8_t noiseStrength = 5;
2470 ADD_OR_SIZE(ANDROID_NOISE_STRENGTH, &noiseStrength, 1);
2471
2472 /** android.color */
2473 static const float colorTransform[9] = {
2474 1.0f, 0.f, 0.f,
2475 0.f, 1.f, 0.f,
2476 0.f, 0.f, 1.f
2477 };
2478 ADD_OR_SIZE(ANDROID_COLOR_TRANSFORM, colorTransform, 9);
2479
2480 /** android.tonemap */
2481 static const float tonemapCurve[4] = {
2482 0.f, 0.f,
2483 1.f, 1.f
2484 };
2485 ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
2486 ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
2487 ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
2488
2489 /** android.edge */
2490 static const uint8_t edgeStrength = 5;
2491 ADD_OR_SIZE(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
2492
2493 /** android.scaler */
2494 static const int32_t cropRegion[3] = {
2495 0, 0, Sensor::kResolution[0]
2496 };
2497 ADD_OR_SIZE(ANDROID_SCALER_CROP_REGION, cropRegion, 3);
2498
2499 /** android.jpeg */
2500 static const int32_t jpegQuality = 80;
2501 ADD_OR_SIZE(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
2502
2503 static const int32_t thumbnailSize[2] = {
2504 640, 480
2505 };
2506 ADD_OR_SIZE(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
2507
2508 static const int32_t thumbnailQuality = 80;
2509 ADD_OR_SIZE(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
2510
2511 static const double gpsCoordinates[2] = {
2512 0, 0
2513 };
2514 ADD_OR_SIZE(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2);
2515
2516 static const uint8_t gpsProcessingMethod[32] = "None";
2517 ADD_OR_SIZE(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
2518
2519 static const int64_t gpsTimestamp = 0;
2520 ADD_OR_SIZE(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
2521
2522 static const int32_t jpegOrientation = 0;
2523 ADD_OR_SIZE(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
2524
2525 /** android.stats */
2526
2527 static const uint8_t faceDetectMode = ANDROID_STATS_FACE_DETECTION_OFF;
2528 ADD_OR_SIZE(ANDROID_STATS_FACE_DETECT_MODE, &faceDetectMode, 1);
2529
2530 static const uint8_t histogramMode = ANDROID_STATS_OFF;
2531 ADD_OR_SIZE(ANDROID_STATS_HISTOGRAM_MODE, &histogramMode, 1);
2532
2533 static const uint8_t sharpnessMapMode = ANDROID_STATS_OFF;
2534 ADD_OR_SIZE(ANDROID_STATS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
2535
2536 // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
2537 // sharpnessMap only in frames
2538
2539 /** android.control */
2540
2541 uint8_t controlIntent = 0;
2542 switch (request_template) {
2543 case CAMERA2_TEMPLATE_PREVIEW:
2544 controlIntent = ANDROID_CONTROL_INTENT_PREVIEW;
2545 break;
2546 case CAMERA2_TEMPLATE_STILL_CAPTURE:
2547 controlIntent = ANDROID_CONTROL_INTENT_STILL_CAPTURE;
2548 break;
2549 case CAMERA2_TEMPLATE_VIDEO_RECORD:
2550 controlIntent = ANDROID_CONTROL_INTENT_VIDEO_RECORD;
2551 break;
2552 case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
2553 controlIntent = ANDROID_CONTROL_INTENT_VIDEO_SNAPSHOT;
2554 break;
2555 case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
2556 controlIntent = ANDROID_CONTROL_INTENT_ZERO_SHUTTER_LAG;
2557 break;
2558 default:
2559 controlIntent = ANDROID_CONTROL_INTENT_CUSTOM;
2560 break;
2561 }
2562 ADD_OR_SIZE(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
2563
2564 static const uint8_t controlMode = ANDROID_CONTROL_AUTO;
2565 ADD_OR_SIZE(ANDROID_CONTROL_MODE, &controlMode, 1);
2566
2567 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_OFF;
2568 ADD_OR_SIZE(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
2569
2570 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
2571 ADD_OR_SIZE(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
2572
2573 static const uint8_t aeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH;
2574 ADD_OR_SIZE(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
2575
2576 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
2577 ADD_OR_SIZE(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
2578
2579 static const int32_t controlRegions[5] = {
2580 0, 0, Sensor::kResolution[0], Sensor::kResolution[1], 1000
2581 };
2582 ADD_OR_SIZE(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
2583
2584 static const int32_t aeExpCompensation = 0;
2585 ADD_OR_SIZE(ANDROID_CONTROL_AE_EXP_COMPENSATION, &aeExpCompensation, 1);
2586
2587 static const int32_t aeTargetFpsRange[2] = {
2588 10, 30
2589 };
2590 ADD_OR_SIZE(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
2591
2592 static const uint8_t aeAntibandingMode =
2593 ANDROID_CONTROL_AE_ANTIBANDING_AUTO;
2594 ADD_OR_SIZE(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
2595
2596 static const uint8_t awbMode =
2597 ANDROID_CONTROL_AWB_AUTO;
2598 ADD_OR_SIZE(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
2599
2600 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
2601 ADD_OR_SIZE(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
2602
2603 ADD_OR_SIZE(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5);
2604
2605 uint8_t afMode = 0;
2606 switch (request_template) {
2607 case CAMERA2_TEMPLATE_PREVIEW:
2608 afMode = ANDROID_CONTROL_AF_AUTO;
2609 break;
2610 case CAMERA2_TEMPLATE_STILL_CAPTURE:
2611 afMode = ANDROID_CONTROL_AF_AUTO;
2612 break;
2613 case CAMERA2_TEMPLATE_VIDEO_RECORD:
2614 afMode = ANDROID_CONTROL_AF_CONTINUOUS_VIDEO;
2615 break;
2616 case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
2617 afMode = ANDROID_CONTROL_AF_CONTINUOUS_VIDEO;
2618 break;
2619 case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
2620 afMode = ANDROID_CONTROL_AF_CONTINUOUS_PICTURE;
2621 break;
2622 default:
2623 afMode = ANDROID_CONTROL_AF_AUTO;
2624 break;
2625 }
2626 ADD_OR_SIZE(ANDROID_CONTROL_AF_MODE, &afMode, 1);
2627
2628 ADD_OR_SIZE(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
2629
2630 static const uint8_t vstabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_OFF;
2631 ADD_OR_SIZE(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
2632
2633 // aeState, awbState, afState only in frame
2634
2635 /** Allocate metadata if sizing */
2636 if (sizeRequest) {
2637 ALOGV("Allocating %d entries, %d extra bytes for "
2638 "request template type %d",
2639 entryCount, dataCount, request_template);
2640 *request = allocate_camera_metadata(entryCount, dataCount);
2641 if (*request == NULL) {
2642 ALOGE("Unable to allocate new request template type %d "
2643 "(%d entries, %d bytes extra data)", request_template,
2644 entryCount, dataCount);
2645 return NO_MEMORY;
2646 }
2647 }
2648 return OK;
2649 #undef ADD_OR_SIZE
2650 }
2651
addOrSize(camera_metadata_t * request,bool sizeRequest,size_t * entryCount,size_t * dataCount,uint32_t tag,const void * entryData,size_t entryDataCount)2652 status_t EmulatedFakeCamera2::addOrSize(camera_metadata_t *request,
2653 bool sizeRequest,
2654 size_t *entryCount,
2655 size_t *dataCount,
2656 uint32_t tag,
2657 const void *entryData,
2658 size_t entryDataCount) {
2659 status_t res;
2660 if (!sizeRequest) {
2661 return add_camera_metadata_entry(request, tag, entryData,
2662 entryDataCount);
2663 } else {
2664 int type = get_camera_metadata_tag_type(tag);
2665 if (type < 0 ) return BAD_VALUE;
2666 (*entryCount)++;
2667 (*dataCount) += calculate_camera_metadata_entry_data_size(type,
2668 entryDataCount);
2669 return OK;
2670 }
2671 }
2672
isStreamInUse(uint32_t id)2673 bool EmulatedFakeCamera2::isStreamInUse(uint32_t id) {
2674 // Assumes mMutex is locked; otherwise new requests could enter
2675 // configureThread while readoutThread is being checked
2676
2677 // Order of isStreamInUse calls matters
2678 if (mConfigureThread->isStreamInUse(id) ||
2679 mReadoutThread->isStreamInUse(id) ||
2680 mJpegCompressor->isStreamInUse(id) ) {
2681 ALOGE("%s: Stream %d is in use in active requests!",
2682 __FUNCTION__, id);
2683 return true;
2684 }
2685 return false;
2686 }
2687
isReprocessStreamInUse(uint32_t id)2688 bool EmulatedFakeCamera2::isReprocessStreamInUse(uint32_t id) {
2689 // TODO: implement
2690 return false;
2691 }
2692
/**
 * Returns the configuration record for the output stream with the given ID.
 *
 * NOTE(review): the returned reference points into mStreams, and mMutex is
 * released when this function returns — callers presumably rely on streams
 * not being removed while they hold the reference; verify at call sites.
 * KeyedVector::valueFor on a missing ID does not insert; behavior for an
 * unknown streamId should be confirmed against callers.
 */
const Stream& EmulatedFakeCamera2::getStreamInfo(uint32_t streamId) {
    Mutex::Autolock lock(mMutex);

    return mStreams.valueFor(streamId);
}
2698
/**
 * Returns the configuration record for the reprocess stream with the given ID.
 *
 * NOTE(review): same lifetime caveat as getStreamInfo() — the reference into
 * mReprocessStreams outlives the lock scope; confirm callers don't race with
 * stream teardown.
 */
const ReprocessStream& EmulatedFakeCamera2::getReprocessStreamInfo(uint32_t streamId) {
    Mutex::Autolock lock(mMutex);

    return mReprocessStreams.valueFor(streamId);
}
2704
2705 }; /* namespace android */
2706