1 /*
2 * Copyright (C) 2017 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 /*
18 * Contains implementation of a class EmulatedQemuCamera3 that encapsulates
19 * functionality of an advanced fake camera.
20 */
21
22 // Uncomment LOG_NDEBUG to enable verbose logging, and uncomment both LOG_NDEBUG
23 // *and* LOG_NNDEBUG to enable very verbose logging.
24
25 //#define LOG_NDEBUG 0
26 //#define LOG_NNDEBUG 0
27
28 #define LOG_TAG "EmulatedCamera_QemuCamera3"
29
30 #if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
31 #define ALOGVV ALOGV
32 #else
33 #define ALOGVV(...) ((void)0)
34 #endif
35
36 #include "EmulatedCameraFactory.h"
37 #include "EmulatedQemuCamera3.h"
38
39 #include <cmath>
40 #include <cutils/properties.h>
41 #include <inttypes.h>
42 #include <sstream>
43 #include <ui/Fence.h>
44 #include <ui/Rect.h>
45 #include <log/log.h>
46 #include <vector>
47
48 namespace android {
49 /*
50 * Constants for Camera Capabilities
51 */
52
// Time-unit multipliers for nsecs_t values: nanoseconds per microsecond and
// nanoseconds per millisecond. The names refer to the unit being converted
// *from* (e.g. 10 * MSEC is 10 ms expressed in ns).
const int64_t USEC = 1000LL;
const int64_t MSEC = USEC * 1000LL;

// Pixel formats advertised to the framework; configureStreams() rejects any
// stream request whose format is not in this list.
const int32_t EmulatedQemuCamera3::kAvailableFormats[] = {
        HAL_PIXEL_FORMAT_BLOB,
        HAL_PIXEL_FORMAT_RGBA_8888,
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
        // These are handled by YCbCr_420_888
        //        HAL_PIXEL_FORMAT_YV12,
        //        HAL_PIXEL_FORMAT_YCrCb_420_SP,
        HAL_PIXEL_FORMAT_YCbCr_420_888
};

/**
 * 3A constants
 */

// Default exposure and gain targets for different scenarios
const nsecs_t EmulatedQemuCamera3::kNormalExposureTime = 10 * MSEC;
const nsecs_t EmulatedQemuCamera3::kFacePriorityExposureTime = 30 * MSEC;
const int EmulatedQemuCamera3::kNormalSensitivity = 100;
const int EmulatedQemuCamera3::kFacePrioritySensitivity = 400;
//CTS requires 8 frames timeout in waitForAeStable
const float EmulatedQemuCamera3::kExposureTrackRate = 0.2;
const int EmulatedQemuCamera3::kPrecaptureMinFrames = 10;
const int EmulatedQemuCamera3::kStableAeMaxFrames = 100;
// AE exposure wander limits used by the fake 3A loop.
// NOTE(review): units appear to be relative exposure steps — confirm against
// the process3A() implementation (not visible in this chunk).
const float EmulatedQemuCamera3::kExposureWanderMin = -2;
const float EmulatedQemuCamera3::kExposureWanderMax = 1;
81
82 /*****************************************************************************
83 * Constructor/Destructor
84 ****************************************************************************/
85
EmulatedQemuCamera3(int cameraId,struct hw_module_t * module,GraphicBufferMapper * gbm)86 EmulatedQemuCamera3::EmulatedQemuCamera3(int cameraId, struct hw_module_t* module,
87 GraphicBufferMapper* gbm) :
88 EmulatedCamera3(cameraId, module), mGBM(gbm) {
89 ALOGI("Constructing emulated qemu camera 3: ID %d", mCameraID);
90 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; ++i) {
91 mDefaultTemplates[i] = nullptr;
92 }
93 }
94
EmulatedQemuCamera3::~EmulatedQemuCamera3() {
    // Free the default request templates cached lazily by
    // constructDefaultRequestSettings().
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; ++i) {
        if (mDefaultTemplates[i] != nullptr) {
            free_camera_metadata(mDefaultTemplates[i]);
        }
    }
    // Initialize() stored this pointer; the delete[] implies the factory
    // allocated the name with new[]. NOTE(review): confirm the allocation
    // site matches — delete[] on anything else is undefined behavior.
    delete[] mDeviceName;
}
103
104 /*****************************************************************************
105 * Public Methods
106 ****************************************************************************/
107
108 /*
109 * Camera Device Lifecycle Methods
110 */
111
parseResolutions(const char * frameDims)112 void EmulatedQemuCamera3::parseResolutions(const char *frameDims) {
113 const size_t kMaxFrameDimsLength = 512;
114 size_t frameDimsLength = strnlen(frameDims, kMaxFrameDimsLength);
115 if (frameDimsLength == kMaxFrameDimsLength) {
116 ALOGE("%s: Frame dimensions string was too long (>= %d)",
117 __FUNCTION__, frameDimsLength);
118 return;
119 } else if (frameDimsLength == 0) {
120 ALOGE("%s: Frame dimensions string was NULL or zero-length",
121 __FUNCTION__);
122 return;
123 }
124 std::stringstream ss(frameDims);
125 std::string input;
126 while (std::getline(ss, input, ',')) {
127 int width = 0;
128 int height = 0;
129 char none = 0;
130 /*
131 * Expect only two results because that means there was nothing after
132 * the height, we don't want any trailing characters. Otherwise, we just
133 * ignore this entry.
134 */
135 if (sscanf(input.c_str(), "%dx%d%c", &width, &height, &none) == 2) {
136 mResolutions.push_back(std::pair<int32_t,int32_t>(width, height));
137 ALOGI("%s: %dx%d", __FUNCTION__, width, height);
138 }
139 else {
140 ALOGE("wrong resolution input %s", input.c_str());
141 }
142 }
143
144 /*
145 * We assume the sensor size of the webcam is the resolution with the
146 * largest area. Any resolution with a dimension that exceeds the sensor
147 * size will be rejected, so Camera API calls will start failing. To work
148 * around this, we remove any resolutions with at least one dimension
149 * exceeding that of the max area resolution.
150 */
151
152 // Find the resolution with the maximum area and use that as the sensor
153 // size.
154 int maxArea = 0;
155 for (const auto &res : mResolutions) {
156 int area = res.first * res.second;
157 if (area > maxArea) {
158 maxArea = area;
159 mSensorWidth = res.first;
160 mSensorHeight = res.second;
161 }
162 }
163
164 // Remove any resolution with a dimension exceeding the sensor size.
165 for (auto res = mResolutions.begin(); res != mResolutions.end(); ) {
166 if (res->first > (int32_t)mSensorWidth ||
167 res->second > (int32_t)mSensorHeight) {
168 // Width and/or height larger than sensor. Remove it.
169 res = mResolutions.erase(res);
170 } else {
171 ++res;
172 }
173 }
174
175 if (mResolutions.empty()) {
176 ALOGE("%s: Qemu camera has no valid resolutions", __FUNCTION__);
177 }
178 }
179
/*
 * One-time initialization from the factory-provided configuration strings.
 *
 * deviceName: qemu camera device name. This object keeps the raw pointer and
 *     releases it with delete[] in the destructor. NOTE(review): assumes the
 *     caller allocated it with new[] and relinquishes ownership — confirm.
 * frameDims:  comma-separated "WxH" resolution list; parsed then freed here.
 * facingDir:  "back" or "front"; consumed then freed here.
 *
 * Returns INVALID_OPERATION if already initialized, otherwise propagates
 * errors from capability/static-info setup or the base-class Initialize().
 */
status_t EmulatedQemuCamera3::Initialize(const char *deviceName,
                                         const char *frameDims,
                                         const char *facingDir) {
    // mStatus starts as STATUS_ERROR; any other state means Initialize()
    // already ran.
    if (mStatus != STATUS_ERROR) {
        ALOGE("%s: Already initialized!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    /*
     * Save parameters for later.
     */
    mDeviceName = deviceName;
    parseResolutions(frameDims);
    if (strcmp("back", facingDir) == 0) {
        mFacingBack = true;
    } else {
        mFacingBack = false;
    }
    // We no longer need these two strings. (deviceName, by contrast, is kept
    // alive in mDeviceName and freed in the destructor.)
    delete[] frameDims;
    delete[] facingDir;

    status_t res = getCameraCapabilities();
    if (res != OK) {
        ALOGE("%s: Unable to get camera capabilities: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    res = constructStaticInfo();
    if (res != OK) {
        ALOGE("%s: Unable to allocate static info: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    return EmulatedCamera3::Initialize();
}
218
/*
 * Opens the device: starts the QemuSensor, spawns the readout thread and
 * JPEG compressor, and resets the fake 3A state machine to its defaults.
 * Only legal from STATUS_CLOSED.
 */
status_t EmulatedQemuCamera3::connectCamera(hw_device_t** device) {
    Mutex::Autolock l(mLock);
    status_t res;

    if (mStatus != STATUS_CLOSED) {
        ALOGE("%s: Can't connect in state %d", __FUNCTION__, mStatus);
        return INVALID_OPERATION;
    }

    /*
     * Initialize sensor.
     */
    mSensor = new QemuSensor(mDeviceName, mSensorWidth, mSensorHeight, mGBM);
    mSensor->setQemuSensorListener(this);
    res = mSensor->startUp();
    if (res != NO_ERROR) {
        // Sensor failed to start; leave the device closed.
        return res;
    }

    mReadoutThread = new ReadoutThread(this);
    mJpegCompressor = new JpegCompressor(mGBM);

    res = mReadoutThread->run("EmuCam3::readoutThread");
    if (res != NO_ERROR) return res;

    // Initialize fake 3A: everything starts in auto/inactive so the first
    // capture request drives the state machines from a known baseline.

    mFacePriority = false;
    mAeMode = ANDROID_CONTROL_AE_MODE_ON;
    mAfMode = ANDROID_CONTROL_AF_MODE_AUTO;
    mAwbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
    mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
    mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
    mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
    mAeCounter = 0;
    mAeTargetExposureTime = kNormalExposureTime;
    mAeCurrentExposureTime = kNormalExposureTime;
    mAeCurrentSensitivity = kNormalSensitivity;

    return EmulatedCamera3::connectCamera(device);
}
260
/*
 * Shuts the device down: stops the sensor, joins the readout thread, and
 * tears down per-stream private state.
 *
 * Note the locking shape: mLock is released before join() — presumably so
 * the exiting readout thread can acquire it — and re-acquired afterwards
 * for teardown. Keep this ordering; joining while holding mLock risks
 * deadlock.
 */
status_t EmulatedQemuCamera3::closeCamera() {
    status_t res;
    {
        Mutex::Autolock l(mLock);
        if (mStatus == STATUS_CLOSED) return OK;

        res = mSensor->shutDown();
        if (res != NO_ERROR) {
            ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
            return res;
        }
        mSensor.clear();

        // Ask the readout thread to stop; the actual join happens below,
        // outside the lock.
        mReadoutThread->requestExit();
    }

    mReadoutThread->join();

    {
        Mutex::Autolock l(mLock);
        // Clear out private stream information.
        for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
            PrivateStreamInfo *privStream =
                    static_cast<PrivateStreamInfo*>((*s)->priv);
            delete privStream;
            (*s)->priv = nullptr;
        }
        mStreams.clear();
        mReadoutThread.clear();
    }

    return EmulatedCamera3::closeCamera();
}
294
getCameraInfo(struct camera_info * info)295 status_t EmulatedQemuCamera3::getCameraInfo(struct camera_info *info) {
296 info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
297 info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation();
298 return EmulatedCamera3::getCameraInfo(info);
299 }
300
301 /*
302 * Camera3 Interface Methods
303 */
304
configureStreams(camera3_stream_configuration * streamList)305 status_t EmulatedQemuCamera3::configureStreams(
306 camera3_stream_configuration *streamList) {
307 Mutex::Autolock l(mLock);
308 ALOGV("%s: %d streams", __FUNCTION__, streamList->num_streams);
309
310 if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
311 ALOGE("%s: Cannot configure streams in state %d",
312 __FUNCTION__, mStatus);
313 return NO_INIT;
314 }
315
316 /*
317 * Sanity-check input list.
318 */
319 if (streamList == nullptr) {
320 ALOGE("%s: NULL stream configuration", __FUNCTION__);
321 return BAD_VALUE;
322 }
323 if (streamList->streams == nullptr) {
324 ALOGE("%s: NULL stream list", __FUNCTION__);
325 return BAD_VALUE;
326 }
327 if (streamList->num_streams < 1) {
328 ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
329 streamList->num_streams);
330 return BAD_VALUE;
331 }
332
333 camera3_stream_t *inputStream = nullptr;
334 for (size_t i = 0; i < streamList->num_streams; ++i) {
335 camera3_stream_t *newStream = streamList->streams[i];
336
337 if (newStream == nullptr) {
338 ALOGE("%s: Stream index %zu was NULL", __FUNCTION__, i);
339 return BAD_VALUE;
340 }
341
342 ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x",
343 __FUNCTION__, newStream, i, newStream->stream_type,
344 newStream->usage, newStream->format);
345
346 if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
347 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
348 if (inputStream != nullptr) {
349 ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
350 return BAD_VALUE;
351 }
352 inputStream = newStream;
353 }
354
355 bool validFormat = false;
356 size_t numFormats = sizeof(kAvailableFormats) /
357 sizeof(kAvailableFormats[0]);
358 for (size_t f = 0; f < numFormats; ++f) {
359 if (newStream->format == kAvailableFormats[f]) {
360 validFormat = true;
361 break;
362 }
363 }
364 if (!validFormat) {
365 ALOGE("%s: Unsupported stream format 0x%x requested",
366 __FUNCTION__, newStream->format);
367 return BAD_VALUE;
368 }
369 }
370 mInputStream = inputStream;
371
372 /*
373 * Initially mark all existing streams as not alive.
374 */
375 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
376 PrivateStreamInfo *privStream =
377 static_cast<PrivateStreamInfo*>((*s)->priv);
378 privStream->alive = false;
379 }
380
381 /*
382 * Find new streams and mark still-alive ones.
383 */
384 for (size_t i = 0; i < streamList->num_streams; ++i) {
385 camera3_stream_t *newStream = streamList->streams[i];
386 if (newStream->priv == nullptr) {
387 // New stream. Construct info.
388 PrivateStreamInfo *privStream = new PrivateStreamInfo();
389 privStream->alive = true;
390
391 newStream->max_buffers = kMaxBufferCount;
392 newStream->priv = privStream;
393 mStreams.push_back(newStream);
394 } else {
395 // Existing stream, mark as still alive.
396 PrivateStreamInfo *privStream =
397 static_cast<PrivateStreamInfo*>(newStream->priv);
398 privStream->alive = true;
399 }
400 // Always update usage and max buffers.
401 newStream->max_buffers = kMaxBufferCount;
402 switch (newStream->stream_type) {
403 case CAMERA3_STREAM_OUTPUT:
404 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
405 break;
406 case CAMERA3_STREAM_INPUT:
407 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
408 break;
409 case CAMERA3_STREAM_BIDIRECTIONAL:
410 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
411 GRALLOC_USAGE_HW_CAMERA_WRITE;
412 break;
413 }
414 // Set the buffer format, inline with gralloc implementation
415 if (newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
416 if (newStream->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) {
417 if (newStream->usage & GRALLOC_USAGE_HW_TEXTURE) {
418 newStream->format = HAL_PIXEL_FORMAT_YCbCr_420_888;
419 }
420 else if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
421 newStream->format = HAL_PIXEL_FORMAT_YCbCr_420_888;
422 }
423 else {
424 newStream->format = HAL_PIXEL_FORMAT_RGB_888;
425 }
426 }
427 }
428 }
429
430 /*
431 * Reap the dead streams.
432 */
433 for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
434 PrivateStreamInfo *privStream =
435 static_cast<PrivateStreamInfo*>((*s)->priv);
436 if (!privStream->alive) {
437 (*s)->priv = nullptr;
438 delete privStream;
439 s = mStreams.erase(s);
440 } else {
441 ++s;
442 }
443 }
444
445 /*
446 * Can't reuse settings across configure call.
447 */
448 mPrevSettings.clear();
449
450 return OK;
451 }
452
registerStreamBuffers(const camera3_stream_buffer_set * bufferSet)453 status_t EmulatedQemuCamera3::registerStreamBuffers(
454 const camera3_stream_buffer_set *bufferSet) {
455 Mutex::Autolock l(mLock);
456 ALOGE("%s: Should not be invoked on HAL versions >= 3.2!", __FUNCTION__);
457 return NO_INIT;
458 }
459
/*
 * Builds (and caches) the default capture settings for the given request
 * template type. The returned pointer is owned by this object: cached
 * entries live in mDefaultTemplates[] until the destructor frees them, so
 * the pointer stays valid for the lifetime of the camera device, as the
 * HAL3 contract requires. Returns nullptr for unknown template types, or
 * for non-PREVIEW templates on devices without BACKWARD_COMPATIBLE.
 */
const camera_metadata_t* EmulatedQemuCamera3::constructDefaultRequestSettings(
        int type) {
    Mutex::Autolock l(mLock);

    if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
        ALOGE("%s: Unknown request settings template: %d",
                __FUNCTION__, type);
        return nullptr;
    }

    if (!hasCapability(BACKWARD_COMPATIBLE) && type != CAMERA3_TEMPLATE_PREVIEW) {
        ALOGE("%s: Template %d not supported w/o BACKWARD_COMPATIBLE capability",
                __FUNCTION__, type);
        return nullptr;
    }

    /*
     * Cache is not just an optimization - pointer returned has to live at least
     * as long as the camera device instance does.
     */
    if (mDefaultTemplates[type] != nullptr) {
        return mDefaultTemplates[type];
    }

    CameraMetadata settings;

    /* android.request */

    static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
    settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);

    static const int32_t id = 0;
    settings.update(ANDROID_REQUEST_ID, &id, 1);

    static const int32_t frameCount = 0;
    settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);

    /* android.lens */

    static const float focalLength = 5.0f;
    settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const float focusDistance = 0;
        settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);

        static const float aperture = 2.8f;
        settings.update(ANDROID_LENS_APERTURE, &aperture, 1);

        static const float filterDensity = 0;
        settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);

        static const uint8_t opticalStabilizationMode =
                ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
                &opticalStabilizationMode, 1);

        // FOCUS_RANGE set only in frame
    }

    /* android.flash */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
        settings.update(ANDROID_FLASH_MODE, &flashMode, 1);

        static const uint8_t flashPower = 10;
        settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);

        static const int64_t firingTime = 0;
        settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
    }

    /* android.scaler */
    if (hasCapability(BACKWARD_COMPATIBLE)) {
        // Default crop: the full active sensor array (no zoom).
        const int32_t cropRegion[4] = {
            0, 0, mSensorWidth, mSensorHeight
        };
        settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
    }

    /* android.jpeg */
    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t jpegQuality = 80;
        settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);

        static const int32_t thumbnailSize[2] = {
            320, 240
        };
        settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);

        static const uint8_t thumbnailQuality = 80;
        settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);

        static const double gpsCoordinates[3] = {
            0, 0, 0
        };
        settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3);

        static const uint8_t gpsProcessingMethod[32] = "None";
        settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);

        static const int64_t gpsTimestamp = 0;
        settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);

        static const int32_t jpegOrientation = 0;
        settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
    }

    /* android.stats */
    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t faceDetectMode =
                ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
        settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);

        static const uint8_t hotPixelMapMode =
                ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
        settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
    }

    /* android.control */

    // The capture intent mirrors the template type one-to-one.
    uint8_t controlIntent = 0;
    switch (type) {
        case CAMERA3_TEMPLATE_PREVIEW:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
            break;
        case CAMERA3_TEMPLATE_STILL_CAPTURE:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
            break;
        case CAMERA3_TEMPLATE_VIDEO_RECORD:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
            break;
        case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
            break;
        case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
            break;
        case CAMERA3_TEMPLATE_MANUAL:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
            break;
        default:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
            break;
    }
    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);

    // MANUAL template disables the 3A control loop entirely.
    const uint8_t controlMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
            ANDROID_CONTROL_MODE_OFF :
            ANDROID_CONTROL_MODE_AUTO;
    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);

    // Video templates pin the minimum frame rate to 30 fps; everything else
    // allows the AE to drop to 5 fps.
    int32_t aeTargetFpsRange[2] = {
        5, 30
    };
    if (type == CAMERA3_TEMPLATE_VIDEO_RECORD ||
        type == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT) {
        aeTargetFpsRange[0] = 30;
    }
    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
        settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

        const uint8_t sceneMode =
                ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
        settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

        const uint8_t aeMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
                ANDROID_CONTROL_AE_MODE_OFF : ANDROID_CONTROL_AE_MODE_ON;
        settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);

        static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
        settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);

        // All-zero region == no metering region specified; reused below for
        // the AF regions as well.
        static const int32_t controlRegions[5] = {
            0, 0, 0, 0, 0
        };
        settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);

        static const int32_t aeExpCompensation = 0;
        settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);


        static const uint8_t aeAntibandingMode =
                ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
        settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);

        static const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
        settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);

        const uint8_t awbMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
                ANDROID_CONTROL_AWB_MODE_OFF :
                ANDROID_CONTROL_AWB_MODE_AUTO;
        settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

        static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
        settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

        // The front camera is treated as fixed-focus (AF off); the back
        // camera picks a continuous/auto AF mode matching the template.
        uint8_t afMode = 0;

        if (mFacingBack) {
            switch (type) {
                case CAMERA3_TEMPLATE_PREVIEW:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    break;
                case CAMERA3_TEMPLATE_STILL_CAPTURE:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    break;
                case CAMERA3_TEMPLATE_VIDEO_RECORD:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    break;
                case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    break;
                case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    break;
                case CAMERA3_TEMPLATE_MANUAL:
                    afMode = ANDROID_CONTROL_AF_MODE_OFF;
                    break;
                default:
                    afMode = ANDROID_CONTROL_AF_MODE_AUTO;
                    break;
            }
        } else {
            afMode = ANDROID_CONTROL_AF_MODE_OFF;
        }
        settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
        settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);

        static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
        settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);

        static const uint8_t vstabMode =
                ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
                &vstabMode, 1);

        static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
        settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);

        static const uint8_t lensShadingMapMode =
                ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
        settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
                &lensShadingMapMode, 1);

        static const uint8_t aberrationMode =
                ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
                &aberrationMode, 1);

        static const int32_t testPatternMode =
                ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
        settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternMode, 1);
    }

    // Transfer ownership of the metadata buffer into the cache; freed in
    // the destructor.
    mDefaultTemplates[type] = settings.release();

    return mDefaultTemplates[type];
}
723
processCaptureRequest(camera3_capture_request * request)724 status_t EmulatedQemuCamera3::processCaptureRequest(
725 camera3_capture_request *request) {
726 Mutex::Autolock l(mLock);
727 status_t res;
728
729 /* Validation */
730
731 if (mStatus < STATUS_READY) {
732 ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
733 mStatus);
734 return INVALID_OPERATION;
735 }
736
737 if (request == nullptr) {
738 ALOGE("%s: NULL request!", __FUNCTION__);
739 return BAD_VALUE;
740 }
741
742 uint32_t frameNumber = request->frame_number;
743
744 if (request->settings == nullptr && mPrevSettings.isEmpty()) {
745 ALOGE("%s: Request %d: NULL settings for first request after"
746 "configureStreams()", __FUNCTION__, frameNumber);
747 return BAD_VALUE;
748 }
749
750 if (request->input_buffer != nullptr &&
751 request->input_buffer->stream != mInputStream) {
752 ALOGE("%s: Request %d: Input buffer not from input stream!",
753 __FUNCTION__, frameNumber);
754 ALOGV("%s: Bad stream %p, expected: %p", __FUNCTION__,
755 request->input_buffer->stream, mInputStream);
756 ALOGV("%s: Bad stream type %d, expected stream type %d", __FUNCTION__,
757 request->input_buffer->stream->stream_type,
758 mInputStream ? mInputStream->stream_type : -1);
759
760 return BAD_VALUE;
761 }
762
763 if (request->num_output_buffers < 1 || request->output_buffers == nullptr) {
764 ALOGE("%s: Request %d: No output buffers provided!",
765 __FUNCTION__, frameNumber);
766 return BAD_VALUE;
767 }
768
769 /*
770 * Validate all buffers, starting with input buffer if it's given.
771 */
772
773 ssize_t idx;
774 const camera3_stream_buffer_t *b;
775 if (request->input_buffer != nullptr) {
776 idx = -1;
777 b = request->input_buffer;
778 } else {
779 idx = 0;
780 b = request->output_buffers;
781 }
782 do {
783 PrivateStreamInfo *priv =
784 static_cast<PrivateStreamInfo*>(b->stream->priv);
785 if (priv == nullptr) {
786 ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!",
787 __FUNCTION__, frameNumber, idx);
788 return BAD_VALUE;
789 }
790 if (!priv->alive) {
791 ALOGE("%s: Request %d: Buffer %zu: Dead stream!",
792 __FUNCTION__, frameNumber, idx);
793 return BAD_VALUE;
794 }
795 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
796 ALOGE("%s: Request %d: Buffer %zu: Status not OK!",
797 __FUNCTION__, frameNumber, idx);
798 return BAD_VALUE;
799 }
800 if (b->release_fence != -1) {
801 ALOGE("%s: Request %d: Buffer %zu: Has a release fence!",
802 __FUNCTION__, frameNumber, idx);
803 return BAD_VALUE;
804 }
805 if (b->buffer == nullptr) {
806 ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!",
807 __FUNCTION__, frameNumber, idx);
808 return BAD_VALUE;
809 }
810 idx++;
811 b = &(request->output_buffers[idx]);
812 } while (idx < (ssize_t)request->num_output_buffers);
813
814 // TODO: Validate settings parameters.
815
816 /*
817 * Start processing this request.
818 */
819
820 mStatus = STATUS_ACTIVE;
821
822 CameraMetadata settings;
823
824 if (request->settings == nullptr) {
825 settings.acquire(mPrevSettings);
826 } else {
827 settings = request->settings;
828 }
829
830 res = process3A(settings);
831 if (res != OK) {
832 return res;
833 }
834
835 /*
836 * Get ready for sensor config.
837 */
838 // TODO: We shouldn't need exposureTime or frameDuration for webcams.
839 nsecs_t exposureTime;
840 nsecs_t frameDuration;
841 bool needJpeg = false;
842 camera_metadata_entry_t entry;
843
844 entry = settings.find(ANDROID_SENSOR_EXPOSURE_TIME);
845 exposureTime = (entry.count > 0) ?
846 entry.data.i64[0] :
847 QemuSensor::kExposureTimeRange[0];
848
849 // Note: Camera consumers may rely on there being an exposure
850 // time set in the camera metadata.
851 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
852
853 entry = settings.find(ANDROID_SENSOR_FRAME_DURATION);
854 frameDuration = (entry.count > 0) ?
855 entry.data.i64[0] :
856 QemuSensor::kFrameDurationRange[0];
857
858 if (exposureTime > frameDuration) {
859 frameDuration = exposureTime + QemuSensor::kMinVerticalBlank;
860 settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
861 }
862
863 static const int32_t sensitivity = QemuSensor::kSensitivityRange[0];
864 settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
865
866 static const uint8_t colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
867 settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
868
869 static const float colorGains[4] = {
870 1.0f, 1.0f, 1.0f, 1.0f
871 };
872 settings.update(ANDROID_COLOR_CORRECTION_GAINS, colorGains, 4);
873
874 static const camera_metadata_rational colorTransform[9] = {
875 {1,1}, {0,1}, {0,1},
876 {0,1}, {1,1}, {0,1},
877 {0,1}, {0,1}, {1,1}
878 };
879 settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
880
881 static const camera_metadata_rational neutralColorPoint[3] = {
882 {1,1}, {1,1}, {1,1},
883 };
884 settings.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT, neutralColorPoint, 3);
885
886 Buffers *sensorBuffers = new Buffers();
887 HalBufferVector *buffers = new HalBufferVector();
888
889 sensorBuffers->setCapacity(request->num_output_buffers);
890 buffers->setCapacity(request->num_output_buffers);
891
892 /*
893 * Process all the buffers we got for output, constructing internal buffer
894 * structures for them, and lock them for writing.
895 */
896 for (size_t i = 0; i < request->num_output_buffers; ++i) {
897 const camera3_stream_buffer &srcBuf = request->output_buffers[i];
898 StreamBuffer destBuf;
899 destBuf.streamId = kGenericStreamId;
900 destBuf.width = srcBuf.stream->width;
901 destBuf.height = srcBuf.stream->height;
902 // inline with goldfish gralloc
903 if (srcBuf.stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
904 if (srcBuf.stream->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) {
905 if (srcBuf.stream->usage & GRALLOC_USAGE_HW_TEXTURE) {
906 destBuf.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
907 }
908 else if (srcBuf.stream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
909 destBuf.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
910 }
911 else if ((srcBuf.stream->usage & GRALLOC_USAGE_HW_CAMERA_MASK)
912 == GRALLOC_USAGE_HW_CAMERA_ZSL) {
913 destBuf.format = HAL_PIXEL_FORMAT_RGB_888;
914 }
915 }
916 }
917 else {
918 destBuf.format = srcBuf.stream->format;
919 }
920
921 destBuf.stride = srcBuf.stream->width;
922 destBuf.dataSpace = srcBuf.stream->data_space;
923 destBuf.buffer = srcBuf.buffer;
924
925 if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
926 needJpeg = true;
927 }
928
929 // Wait on fence.
930 sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
931 res = bufferAcquireFence->wait(kFenceTimeoutMs);
932 if (res == TIMED_OUT) {
933 ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
934 __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
935 }
936 if (res == OK) {
937 // Lock buffer for writing.
938 if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
939 if (destBuf.format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
940 android_ycbcr ycbcr = {};
941 res = mGBM->lockYCbCr(
942 *(destBuf.buffer),
943 GRALLOC_USAGE_HW_CAMERA_WRITE |
944 GRALLOC_USAGE_SW_READ_OFTEN |
945 GRALLOC_USAGE_SW_WRITE_OFTEN,
946 Rect(0, 0, destBuf.width, destBuf.height),
947 &ycbcr);
948 /*
949 * This is only valid because we know that emulator's
950 * YCbCr_420_888 is really contiguous NV21 under the hood.
951 */
952 destBuf.img = static_cast<uint8_t*>(ycbcr.y);
953 } else {
954 ALOGE("Unexpected private format for flexible YUV: 0x%x",
955 destBuf.format);
956 res = INVALID_OPERATION;
957 }
958 } else {
959 res = mGBM->lock(
960 *(destBuf.buffer),
961 GRALLOC_USAGE_HW_CAMERA_WRITE |
962 GRALLOC_USAGE_SW_READ_OFTEN |
963 GRALLOC_USAGE_SW_WRITE_OFTEN,
964 Rect(0, 0, destBuf.width, destBuf.height),
965 (void**)&(destBuf.img));
966
967 }
968 if (res != OK) {
969 ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer",
970 __FUNCTION__, frameNumber, i);
971 }
972 }
973
974 if (res != OK) {
975 /*
976 * Either waiting or locking failed. Unlock locked buffers and bail
977 * out.
978 */
979 for (size_t j = 0; j < i; j++) {
980 mGBM->unlock(*(request->output_buffers[i].buffer));
981 }
982 delete sensorBuffers;
983 delete buffers;
984 return NO_INIT;
985 }
986
987 sensorBuffers->push_back(destBuf);
988 buffers->push_back(srcBuf);
989 }
990
991 /*
992 * Wait for JPEG compressor to not be busy, if needed.
993 */
994 if (needJpeg) {
995 bool ready = mJpegCompressor->waitForDone(kJpegTimeoutNs);
996 if (!ready) {
997 ALOGE("%s: Timeout waiting for JPEG compression to complete!",
998 __FUNCTION__);
999 return NO_INIT;
1000 }
1001 res = mJpegCompressor->reserve();
1002 if (res != OK) {
1003 ALOGE("%s: Error managing JPEG compressor resources, can't "
1004 "reserve it!", __FUNCTION__);
1005 return NO_INIT;
1006 }
1007 }
1008
1009 /*
1010 * TODO: We shouldn't need to wait for sensor readout with a webcam, because
1011 * we might be wasting time.
1012 */
1013
1014 /*
1015 * Wait until the in-flight queue has room.
1016 */
1017 res = mReadoutThread->waitForReadout();
1018 if (res != OK) {
1019 ALOGE("%s: Timeout waiting for previous requests to complete!",
1020 __FUNCTION__);
1021 return NO_INIT;
1022 }
1023
1024 /*
1025 * Wait until sensor's ready. This waits for lengthy amounts of time with
1026 * mLock held, but the interface spec is that no other calls may by done to
1027 * the HAL by the framework while process_capture_request is happening.
1028 */
1029 int syncTimeoutCount = 0;
1030 while(!mSensor->waitForVSync(kSyncWaitTimeout)) {
1031 if (mStatus == STATUS_ERROR) {
1032 return NO_INIT;
1033 }
1034 if (syncTimeoutCount == kMaxSyncTimeoutCount) {
1035 ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
1036 __FUNCTION__, frameNumber,
1037 kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
1038 return NO_INIT;
1039 }
1040 syncTimeoutCount++;
1041 }
1042
1043 /*
1044 * Configure sensor and queue up the request to the readout thread.
1045 */
1046 mSensor->setFrameDuration(frameDuration);
1047 mSensor->setDestinationBuffers(sensorBuffers);
1048 mSensor->setFrameNumber(request->frame_number);
1049
1050 ReadoutThread::Request r;
1051 r.frameNumber = request->frame_number;
1052 r.settings = settings;
1053 r.sensorBuffers = sensorBuffers;
1054 r.buffers = buffers;
1055
1056 mReadoutThread->queueCaptureRequest(r);
1057 ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);
1058
1059 // Cache the settings for next time.
1060 mPrevSettings.acquire(settings);
1061
1062 return OK;
1063 }
1064
/*
 * camera3 flush() entry point. Not implemented for the emulated camera:
 * in-flight requests are simply allowed to complete normally, and OK is
 * returned so the framework does not treat this as a device error.
 */
status_t EmulatedQemuCamera3::flush() {
    ALOGW("%s: Not implemented; ignored", __FUNCTION__);
    return OK;
}
1069
1070 /*****************************************************************************
1071 * Private Methods
1072 ****************************************************************************/
1073
getCameraCapabilities()1074 status_t EmulatedQemuCamera3::getCameraCapabilities() {
1075 const char *key = mFacingBack ? "qemu.sf.back_camera_caps" :
1076 "qemu.sf.front_camera_caps";
1077
1078 /*
1079 * Defined by 'qemu.sf.*_camera_caps' boot property: if the property doesn't
1080 * exist, it is assumed to list FULL.
1081 */
1082 char prop[PROPERTY_VALUE_MAX];
1083 if (property_get(key, prop, nullptr) > 0) {
1084 char *saveptr = nullptr;
1085 char *cap = strtok_r(prop, " ,", &saveptr);
1086 while (cap != nullptr) {
1087 for (int i = 0; i < NUM_CAPABILITIES; ++i) {
1088 if (!strcasecmp(cap, sAvailableCapabilitiesStrings[i])) {
1089 mCapabilities.add(static_cast<AvailableCapabilities>(i));
1090 break;
1091 }
1092 }
1093 cap = strtok_r(nullptr, " ,", &saveptr);
1094 }
1095 if (mCapabilities.size() == 0) {
1096 ALOGE("qemu.sf.back_camera_caps had no valid capabilities: %s", prop);
1097 }
1098 }
1099
1100 mCapabilities.add(BACKWARD_COMPATIBLE);
1101
1102 ALOGI("Camera %d capabilities:", mCameraID);
1103 for (size_t i = 0; i < mCapabilities.size(); ++i) {
1104 ALOGI(" %s", sAvailableCapabilitiesStrings[mCapabilities[i]]);
1105 }
1106
1107 return OK;
1108 }
1109
hasCapability(AvailableCapabilities cap)1110 bool EmulatedQemuCamera3::hasCapability(AvailableCapabilities cap) {
1111 ssize_t idx = mCapabilities.indexOf(cap);
1112 return idx >= 0;
1113 }
1114
/*
 * Builds the static camera characteristics for this device and stores the
 * finished metadata in mCameraInfo. Which entries are emitted depends on the
 * capability set built by getCameraCapabilities(); most 3A and processing
 * related keys are gated on BACKWARD_COMPATIBLE.
 *
 * Returns OK on success, or the first error from CameraMetadata::update().
 */
status_t EmulatedQemuCamera3::constructStaticInfo() {
    CameraMetadata info;
    // Every tag added via ADD_STATIC_ENTRY is also collected here so the
    // complete key list can be published as
    // ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS at the very end.
    Vector<int32_t> availableCharacteristicsKeys;
    status_t res;

#define ADD_STATIC_ENTRY(name, varptr, count) \
    availableCharacteristicsKeys.add(name);   \
    res = info.update(name, varptr, count); \
    if (res != OK) return res

    /* android.sensor */

    static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
            sensorPhysicalSize, 2);

    const int32_t pixelArray[] = {mSensorWidth, mSensorHeight};
    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
            pixelArray, 2);
    // Active array covers the whole pixel array (no masked border pixels).
    const int32_t activeArray[] = {0, 0, mSensorWidth, mSensorHeight};
    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
            activeArray, 4);

    static const int32_t orientation = 90; // Aligned with 'long edge'.
    ADD_STATIC_ENTRY(ANDROID_SENSOR_ORIENTATION, &orientation, 1);

    static const uint8_t timestampSource =
            ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const int32_t availableTestPatternModes[] = {
            ANDROID_SENSOR_TEST_PATTERN_MODE_OFF
        };
        ADD_STATIC_ENTRY(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
                availableTestPatternModes,
                sizeof(availableTestPatternModes) / sizeof(int32_t));
    }

    /* android.lens */

    static const float focalLengths = 5.0f; // mm
    ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
            &focalLengths, 1);

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        // infinity (fixed focus)
        static const float minFocusDistance = 0.0;
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                &minFocusDistance, 1);

        // (fixed focus)
        static const float hyperFocalDistance = 0.0;
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
                &hyperFocalDistance, 1);

        static const float apertures = 2.8f;
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
                &apertures, 1);
        static const float filterDensities = 0;
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
                &filterDensities, 1);
        static const uint8_t availableOpticalStabilization =
                ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
                &availableOpticalStabilization, 1);

        // 1x1 map == no lens shading correction data.
        static const int32_t lensShadingMapSize[] = {1, 1};
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
                sizeof(lensShadingMapSize) / sizeof(int32_t));

        static const uint8_t lensFocusCalibration =
                ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE;
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                &lensFocusCalibration, 1);
    }

    const uint8_t lensFacing = mFacingBack ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    ADD_STATIC_ENTRY(ANDROID_LENS_FACING, &lensFacing, 1);

    /* android.flash */

    // No flash unit on the emulated camera.
    static const uint8_t flashAvailable = 0;
    ADD_STATIC_ENTRY(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);

    /* android.scaler */

    std::vector<int32_t> availableStreamConfigurations;
    std::vector<int64_t> availableMinFrameDurations;
    std::vector<int64_t> availableStallDurations;

    /*
     * Build stream configurations, min frame durations, and stall durations for
     * all resolutions reported by camera device.
     */
    for (const auto &res : mResolutions) {
        int32_t width = res.first, height = res.second;
        // Each entry is a (format, width, height, direction/value) quadruple,
        // as required by the ANDROID_SCALER_AVAILABLE_* tag layouts.
        std::vector<int32_t> currentResStreamConfigurations = {
            HAL_PIXEL_FORMAT_BLOB, width, height,
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,

            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height,
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,

            HAL_PIXEL_FORMAT_YCbCr_420_888, width, height,
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,

            HAL_PIXEL_FORMAT_RGBA_8888, width, height,
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
        };
        std::vector<int32_t> currentResMinFrameDurations = {
            HAL_PIXEL_FORMAT_BLOB, width, height,
            QemuSensor::kFrameDurationRange[0],

            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height,
            QemuSensor::kFrameDurationRange[0],

            HAL_PIXEL_FORMAT_YCbCr_420_888, width, height,
            QemuSensor::kFrameDurationRange[0],

            HAL_PIXEL_FORMAT_RGBA_8888, width, height,
            QemuSensor::kFrameDurationRange[0]
        };
        std::vector<int32_t> currentResStallDurations = {
            // We should only introduce stall times with JPEG-compressed frames.
            HAL_PIXEL_FORMAT_BLOB, width, height,
            QemuSensor::kFrameDurationRange[0],

            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height, 0,

            HAL_PIXEL_FORMAT_YCbCr_420_888, width, height, 0,

            HAL_PIXEL_FORMAT_RGBA_8888, width, height, 0
        };
        availableStreamConfigurations.insert(
                availableStreamConfigurations.end(),
                currentResStreamConfigurations.begin(),
                currentResStreamConfigurations.end());
        availableMinFrameDurations.insert(
                availableMinFrameDurations.end(),
                currentResMinFrameDurations.begin(),
                currentResMinFrameDurations.end());
        availableStallDurations.insert(
                availableStallDurations.end(),
                currentResStallDurations.begin(),
                currentResStallDurations.end());
    }

    /*
     * Now, if nonempty, add them to the camera's available characteristics.
     */
    if (availableStreamConfigurations.size() > 0) {
        ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                availableStreamConfigurations.data(),
                availableStreamConfigurations.size());
    }
    if (availableMinFrameDurations.size() > 0) {
        ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                &availableMinFrameDurations[0],
                availableMinFrameDurations.size());
    }
    if (availableStallDurations.size() > 0) {
        ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
                &availableStallDurations[0],
                availableStallDurations.size());
    }

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
        ADD_STATIC_ENTRY(ANDROID_SCALER_CROPPING_TYPE,
                &croppingType, 1);

        static const float maxZoom = 10;
        ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
                &maxZoom, 1);
    }

    /* android.jpeg */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        // (0, 0) means "no thumbnail" must be supported.
        static const int32_t jpegThumbnailSizes[] = {
            0, 0,
            160, 120,
            320, 240
        };
        ADD_STATIC_ENTRY(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                jpegThumbnailSizes,
                sizeof(jpegThumbnailSizes) / sizeof(int32_t));

        static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
        ADD_STATIC_ENTRY(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
    }

    /* android.stats */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        // No face detection support.
        static const uint8_t availableFaceDetectModes[] = {
            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF
        };
        ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
                availableFaceDetectModes,
                sizeof(availableFaceDetectModes));

        static const int32_t maxFaceCount = 0;
        ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
                &maxFaceCount, 1);

        static const uint8_t availableShadingMapModes[] = {
            ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF
        };
        ADD_STATIC_ENTRY(
                ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
                availableShadingMapModes, sizeof(availableShadingMapModes));
    }

    /* android.sync */

    const int32_t maxLatency =
            hasCapability(FULL_LEVEL) ?
            ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL : 3;
    ADD_STATIC_ENTRY(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);

    /* android.control */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        const uint8_t availableControlModes[] = {
            ANDROID_CONTROL_MODE_OFF,
            ANDROID_CONTROL_MODE_AUTO,
            ANDROID_CONTROL_MODE_USE_SCENE_MODE
        };
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
                availableControlModes, sizeof(availableControlModes));
    } else {
        const uint8_t availableControlModes[] = {
            ANDROID_CONTROL_MODE_AUTO
        };
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
                availableControlModes, sizeof(availableControlModes));
    }

    const uint8_t availableSceneModes[] = {
        hasCapability(BACKWARD_COMPATIBLE) ?
            ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY :
            ANDROID_CONTROL_SCENE_MODE_DISABLED
    };
    ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
            availableSceneModes, sizeof(availableSceneModes));

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t availableEffects[] = {
            ANDROID_CONTROL_EFFECT_MODE_OFF
        };
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                availableEffects, sizeof(availableEffects));
    }

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const int32_t max3aRegions[] = {
            /* AE */ 1,
            /* AWB */ 0,
            /* AF */ 1
        };
        ADD_STATIC_ENTRY(ANDROID_CONTROL_MAX_REGIONS,
                max3aRegions,
                sizeof(max3aRegions) / sizeof(max3aRegions[0]));

        static const uint8_t availableAeModes[] = {
            ANDROID_CONTROL_AE_MODE_OFF,
            ANDROID_CONTROL_AE_MODE_ON
        };
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                availableAeModes, sizeof(availableAeModes));

        // 1/3 EV steps, +/- 3 EV total range.
        static const camera_metadata_rational exposureCompensationStep = {1, 3};
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                &exposureCompensationStep, 1);

        static int32_t exposureCompensationRange[] = {-9, 9};
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
                exposureCompensationRange,
                sizeof(exposureCompensationRange) / sizeof(int32_t));
    }

    // Pairs of (min, max) FPS.
    static const int32_t availableTargetFpsRanges[] = {
        5, 30, 15, 30, 15, 15, 30, 30
    };
    ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            availableTargetFpsRanges,
            sizeof(availableTargetFpsRanges) / sizeof(int32_t));

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t availableAntibandingModes[] = {
            ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
            ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO
        };
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                availableAntibandingModes, sizeof(availableAntibandingModes));
    }

    static const uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;

    ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t availableAwbModes[] = {
            ANDROID_CONTROL_AWB_MODE_OFF,
            ANDROID_CONTROL_AWB_MODE_AUTO,
            ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
            ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
            ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
            ANDROID_CONTROL_AWB_MODE_SHADE,
        };
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                availableAwbModes, sizeof(availableAwbModes));
    }

    static const uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;

    ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);

    // Fixed-focus device: only AF OFF, for either facing.
    static const uint8_t availableAfModesBack[] = {
        ANDROID_CONTROL_AF_MODE_OFF
    };

    static const uint8_t availableAfModesFront[] = {
        ANDROID_CONTROL_AF_MODE_OFF
    };

    if (mFacingBack && hasCapability(BACKWARD_COMPATIBLE)) {
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                availableAfModesBack, sizeof(availableAfModesBack));
    } else {
        ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                availableAfModesFront, sizeof(availableAfModesFront));
    }

    static const uint8_t availableVstabModes[] = {
        ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF,
    };
    ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
            availableVstabModes, sizeof(availableVstabModes));

    /* android.colorCorrection */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        const uint8_t availableAberrationModes[] = {
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY
        };
        ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
                availableAberrationModes, sizeof(availableAberrationModes));
    } else {
        const uint8_t availableAberrationModes[] = {
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
        };
        ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
                availableAberrationModes, sizeof(availableAberrationModes));
    }

    /* android.edge */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        const uint8_t availableEdgeModes[] = {
            ANDROID_EDGE_MODE_OFF,
            ANDROID_EDGE_MODE_FAST,
            ANDROID_EDGE_MODE_HIGH_QUALITY,
        };
        ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
                availableEdgeModes, sizeof(availableEdgeModes));
    } else {
        const uint8_t availableEdgeModes[] = {
            ANDROID_EDGE_MODE_OFF
        };
        ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
                availableEdgeModes, sizeof(availableEdgeModes));
    }

    /* android.info */

    static const uint8_t supportedHardwareLevel =
            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
    ADD_STATIC_ENTRY(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
            &supportedHardwareLevel, /* count */ 1);

    /* android.noiseReduction */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        const uint8_t availableNoiseReductionModes[] = {
            ANDROID_NOISE_REDUCTION_MODE_OFF,
            ANDROID_NOISE_REDUCTION_MODE_FAST,
            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY
        };
        ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
                availableNoiseReductionModes,
                sizeof(availableNoiseReductionModes));
    } else {
        const uint8_t availableNoiseReductionModes[] = {
            ANDROID_NOISE_REDUCTION_MODE_OFF
        };
        ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
                availableNoiseReductionModes,
                sizeof(availableNoiseReductionModes));
    }

    /* android.shading */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        const uint8_t availableShadingModes[] = {
            ANDROID_SHADING_MODE_OFF,
            ANDROID_SHADING_MODE_FAST,
            ANDROID_SHADING_MODE_HIGH_QUALITY
        };
        ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
                sizeof(availableShadingModes));
    } else {
        const uint8_t availableShadingModes[] = {
            ANDROID_SHADING_MODE_OFF
        };
        ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
                sizeof(availableShadingModes));
    }

    /* android.request */

    static const int32_t maxNumOutputStreams[] = {
        kMaxRawStreamCount, kMaxProcessedStreamCount, kMaxJpegStreamCount
    };
    ADD_STATIC_ENTRY(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            maxNumOutputStreams, 3);

    static const uint8_t maxPipelineDepth = kMaxBufferCount;
    ADD_STATIC_ENTRY(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &maxPipelineDepth, 1);

    static const int32_t partialResultCount = 1;
    ADD_STATIC_ENTRY(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
            &partialResultCount, /* count */ 1);

    // Translate the internal capability enum into the public
    // ANDROID_REQUEST_AVAILABLE_CAPABILITIES values (levels are skipped).
    SortedVector<uint8_t> caps;
    for (size_t i = 0; i < mCapabilities.size(); ++i) {
        switch (mCapabilities[i]) {
            case BACKWARD_COMPATIBLE:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
                break;
            case PRIVATE_REPROCESSING:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
                break;
            case READ_SENSOR_SETTINGS:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
                break;
            case BURST_CAPTURE:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
                break;
            case YUV_REPROCESSING:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
                break;
            case CONSTRAINED_HIGH_SPEED_VIDEO:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
                break;
            default:
                // Ignore LEVELs.
                break;
        }
    }
    ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, caps.array(), caps.size());

    // Scan a default request template for included request keys.
    Vector<int32_t> availableRequestKeys;
    const camera_metadata_t *previewRequest =
            constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
    for (size_t i = 0; i < get_camera_metadata_entry_count(previewRequest); ++i) {
        camera_metadata_ro_entry_t entry;
        get_camera_metadata_ro_entry(previewRequest, i, &entry);
        availableRequestKeys.add(entry.tag);
    }
    ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys.array(),
            availableRequestKeys.size());

    /*
     * Add a few more result keys. Must be kept up to date with the various
     * places that add these.
     */

    Vector<int32_t> availableResultKeys(availableRequestKeys);
    if (hasCapability(BACKWARD_COMPATIBLE)) {
        availableResultKeys.add(ANDROID_CONTROL_AE_STATE);
        availableResultKeys.add(ANDROID_CONTROL_AF_STATE);
        availableResultKeys.add(ANDROID_CONTROL_AWB_STATE);
        availableResultKeys.add(ANDROID_FLASH_STATE);
        availableResultKeys.add(ANDROID_LENS_STATE);
        availableResultKeys.add(ANDROID_LENS_FOCUS_RANGE);
        availableResultKeys.add(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
        availableResultKeys.add(ANDROID_STATISTICS_SCENE_FLICKER);
    }

    availableResultKeys.add(ANDROID_REQUEST_PIPELINE_DEPTH);
    availableResultKeys.add(ANDROID_SENSOR_TIMESTAMP);

    ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys.array(),
            availableResultKeys.size());

    // Needs to be last, to collect all the keys set.

    availableCharacteristicsKeys.add(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
    info.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
            availableCharacteristicsKeys);

    mCameraInfo = info.release();

#undef ADD_STATIC_ENTRY
    return OK;
}
1628
process3A(CameraMetadata & settings)1629 status_t EmulatedQemuCamera3::process3A(CameraMetadata &settings) {
1630 /**
1631 * Extract top-level 3A controls
1632 */
1633 status_t res;
1634
1635 camera_metadata_entry e;
1636
1637 e = settings.find(ANDROID_CONTROL_MODE);
1638 if (e.count == 0) {
1639 ALOGE("%s: No control mode entry!", __FUNCTION__);
1640 return BAD_VALUE;
1641 }
1642 uint8_t controlMode = e.data.u8[0];
1643
1644 if (controlMode == ANDROID_CONTROL_MODE_OFF) {
1645 mAeMode = ANDROID_CONTROL_AE_MODE_OFF;
1646 mAfMode = ANDROID_CONTROL_AF_MODE_OFF;
1647 mAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
1648 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
1649 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
1650 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
1651 update3A(settings);
1652 return OK;
1653 } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
1654 if (!hasCapability(BACKWARD_COMPATIBLE)) {
1655 ALOGE("%s: Can't use scene mode when BACKWARD_COMPATIBLE not supported!",
1656 __FUNCTION__);
1657 return BAD_VALUE;
1658 }
1659
1660 e = settings.find(ANDROID_CONTROL_SCENE_MODE);
1661 if (e.count == 0) {
1662 ALOGE("%s: No scene mode entry!", __FUNCTION__);
1663 return BAD_VALUE;
1664 }
1665 uint8_t sceneMode = e.data.u8[0];
1666
1667 switch(sceneMode) {
1668 case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
1669 mFacePriority = true;
1670 break;
1671 default:
1672 ALOGE("%s: Emulator doesn't support scene mode %d",
1673 __FUNCTION__, sceneMode);
1674 return BAD_VALUE;
1675 }
1676 } else {
1677 mFacePriority = false;
1678 }
1679
1680 // controlMode == AUTO or sceneMode = FACE_PRIORITY
1681 // Process individual 3A controls
1682
1683 res = doFakeAE(settings);
1684 if (res != OK) return res;
1685
1686 res = doFakeAF(settings);
1687 if (res != OK) return res;
1688
1689 res = doFakeAWB(settings);
1690 if (res != OK) return res;
1691
1692 update3A(settings);
1693 return OK;
1694 }
1695
/*
 * Fake auto-exposure. Validates the requested AE mode, then simulates the
 * precapture metering sequence: on a precapture trigger the current exposure
 * time converges toward the target over several frames (state PRECAPTURE),
 * and once close enough the state becomes CONVERGED. Outside of precapture,
 * AE is simply reported as CONVERGED.
 *
 * Updates mAeMode, mAeState, mAeCounter, mAeTargetExposureTime, and
 * mAeCurrentExposureTime. Returns OK, or BAD_VALUE on a missing/unsupported
 * AE mode entry.
 */
status_t EmulatedQemuCamera3::doFakeAE(CameraMetadata &settings) {
    camera_metadata_entry e;

    e = settings.find(ANDROID_CONTROL_AE_MODE);
    // BACKWARD_COMPATIBLE devices must supply an AE mode; otherwise default
    // to ON below.
    if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
        ALOGE("%s: No AE mode entry!", __FUNCTION__);
        return BAD_VALUE;
    }
    uint8_t aeMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AE_MODE_ON;
    mAeMode = aeMode;

    switch (aeMode) {
        case ANDROID_CONTROL_AE_MODE_OFF:
            // AE is OFF
            mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
            return OK;
        case ANDROID_CONTROL_AE_MODE_ON:
            // OK for AUTO modes
            break;
        default:
            // Mostly silently ignore unsupported modes
            ALOGV("%s: Emulator doesn't support AE mode %d, assuming ON",
                  __FUNCTION__, aeMode);
            break;
    }

    e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
    bool precaptureTrigger = false;
    if (e.count != 0) {
        precaptureTrigger =
                (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
    }

    if (precaptureTrigger) {
        ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
    } else if (e.count > 0) {
        // Trigger entry present but not START (e.g. IDLE/CANCEL).
        ALOGV("%s: Pre capture trigger was present? %zu",
              __FUNCTION__, e.count);
    }

    if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
        // Run precapture sequence
        if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
            // Entering precapture: restart the convergence counter.
            mAeCounter = 0;
        }

        if (mFacePriority) {
            mAeTargetExposureTime = kFacePriorityExposureTime;
        } else {
            mAeTargetExposureTime = kNormalExposureTime;
        }

        // Converged once we've run a minimum number of frames and the current
        // exposure is within 10% of the target.
        if (mAeCounter > kPrecaptureMinFrames &&
                (mAeTargetExposureTime - mAeCurrentExposureTime) <
                mAeTargetExposureTime / 10) {
            // Done with precapture
            mAeCounter = 0;
            mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
        } else {
            // Converge some more
            mAeCurrentExposureTime +=
                    (mAeTargetExposureTime - mAeCurrentExposureTime) *
                    kExposureTrackRate;
            mAeCounter++;
            mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
        }
    } else {
        mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
    }

    return OK;
}
1769
doFakeAF(CameraMetadata & settings)1770 status_t EmulatedQemuCamera3::doFakeAF(CameraMetadata &settings) {
1771 camera_metadata_entry e;
1772
1773 e = settings.find(ANDROID_CONTROL_AF_MODE);
1774 if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
1775 ALOGE("%s: No AF mode entry!", __FUNCTION__);
1776 return BAD_VALUE;
1777 }
1778 uint8_t afMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AF_MODE_OFF;
1779
1780 switch (afMode) {
1781 case ANDROID_CONTROL_AF_MODE_OFF:
1782 case ANDROID_CONTROL_AF_MODE_AUTO:
1783 case ANDROID_CONTROL_AF_MODE_MACRO:
1784 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
1785 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
1786 // Always report INACTIVE for Qemu Camera
1787 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
1788 break;
1789 default:
1790 ALOGE("%s: Emulator doesn't support AF mode %d",
1791 __FUNCTION__, afMode);
1792 return BAD_VALUE;
1793 }
1794
1795 return OK;
1796 }
1797
doFakeAWB(CameraMetadata & settings)1798 status_t EmulatedQemuCamera3::doFakeAWB(CameraMetadata &settings) {
1799 camera_metadata_entry e;
1800
1801 e = settings.find(ANDROID_CONTROL_AWB_MODE);
1802 if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
1803 ALOGE("%s: No AWB mode entry!", __FUNCTION__);
1804 return BAD_VALUE;
1805 }
1806 uint8_t awbMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AWB_MODE_AUTO;
1807
1808 // TODO: Add white balance simulation
1809
1810 switch (awbMode) {
1811 case ANDROID_CONTROL_AWB_MODE_OFF:
1812 case ANDROID_CONTROL_AWB_MODE_AUTO:
1813 case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
1814 case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
1815 case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
1816 case ANDROID_CONTROL_AWB_MODE_SHADE:
1817 // Always magically right for Qemu Camera
1818 mAwbState = ANDROID_CONTROL_AWB_STATE_CONVERGED;
1819 break;
1820 default:
1821 ALOGE("%s: Emulator doesn't support AWB mode %d",
1822 __FUNCTION__, awbMode);
1823 return BAD_VALUE;
1824 }
1825
1826 return OK;
1827 }
1828
update3A(CameraMetadata & settings)1829 void EmulatedQemuCamera3::update3A(CameraMetadata &settings) {
1830 if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
1831 settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
1832 &mAeCurrentExposureTime, 1);
1833 settings.update(ANDROID_SENSOR_SENSITIVITY,
1834 &mAeCurrentSensitivity, 1);
1835 }
1836
1837 settings.update(ANDROID_CONTROL_AE_STATE,
1838 &mAeState, 1);
1839 settings.update(ANDROID_CONTROL_AF_STATE,
1840 &mAfState, 1);
1841 settings.update(ANDROID_CONTROL_AWB_STATE,
1842 &mAwbState, 1);
1843
1844 uint8_t lensState;
1845 switch (mAfState) {
1846 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1847 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
1848 lensState = ANDROID_LENS_STATE_MOVING;
1849 break;
1850 case ANDROID_CONTROL_AF_STATE_INACTIVE:
1851 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1852 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1853 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1854 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
1855 default:
1856 lensState = ANDROID_LENS_STATE_STATIONARY;
1857 break;
1858 }
1859 settings.update(ANDROID_LENS_STATE, &lensState, 1);
1860 }
1861
/*
 * Called when the readout thread may have gone idle; transitions the device
 * from ACTIVE back to READY if the in-flight queue is truly empty.
 */
void EmulatedQemuCamera3::signalReadoutIdle() {
    Mutex::Autolock l(mLock);
    /*
     * Need to check isIdle again because waiting on mLock may have allowed
     * something to be placed in the in-flight queue.
     */
    if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
        ALOGV("Now idle");
        mStatus = STATUS_READY;
    }
}
1873
onQemuSensorEvent(uint32_t frameNumber,Event e,nsecs_t timestamp)1874 void EmulatedQemuCamera3::onQemuSensorEvent(uint32_t frameNumber, Event e,
1875 nsecs_t timestamp) {
1876 switch (e) {
1877 case QemuSensor::QemuSensorListener::EXPOSURE_START:
1878 ALOGVV("%s: Frame %d: Sensor started exposure at %lld",
1879 __FUNCTION__, frameNumber, timestamp);
1880 // Trigger shutter notify to framework.
1881 camera3_notify_msg_t msg;
1882 msg.type = CAMERA3_MSG_SHUTTER;
1883 msg.message.shutter.frame_number = frameNumber;
1884 msg.message.shutter.timestamp = timestamp;
1885 sendNotify(&msg);
1886 break;
1887 default:
1888 ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__,
1889 e, timestamp);
1890 break;
1891 }
1892 }
1893
/*
 * Readout thread constructor; holds a non-owning pointer back to the parent
 * camera and starts with no JPEG compression pending.
 */
EmulatedQemuCamera3::ReadoutThread::ReadoutThread(EmulatedQemuCamera3 *parent) :
        mParent(parent), mJpegWaiting(false) {
    ALOGV("%s: Creating readout thread", __FUNCTION__);
}
1898
~ReadoutThread()1899 EmulatedQemuCamera3::ReadoutThread::~ReadoutThread() {
1900 for (List<Request>::iterator i = mInFlightQueue.begin();
1901 i != mInFlightQueue.end(); ++i) {
1902 delete i->buffers;
1903 delete i->sensorBuffers;
1904 }
1905 }
1906
/*
 * Adds a capture request to the in-flight queue and wakes the readout
 * thread (and any waiter in waitForReadout()).
 */
void EmulatedQemuCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
    Mutex::Autolock l(mLock);

    mInFlightQueue.push_back(r);
    mInFlightSignal.signal();
}
1913
/*
 * Returns true when nothing is queued and no request is currently being
 * processed by the readout thread.
 */
bool EmulatedQemuCamera3::ReadoutThread::isIdle() {
    Mutex::Autolock l(mLock);
    return mInFlightQueue.empty() && !mThreadActive;
}
1918
waitForReadout()1919 status_t EmulatedQemuCamera3::ReadoutThread::waitForReadout() {
1920 status_t res;
1921 Mutex::Autolock l(mLock);
1922 int loopCount = 0;
1923 while (mInFlightQueue.size() >= kMaxQueueSize) {
1924 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
1925 if (res != OK && res != TIMED_OUT) {
1926 ALOGE("%s: Error waiting for in-flight queue to shrink",
1927 __FUNCTION__);
1928 return INVALID_OPERATION;
1929 }
1930 if (loopCount == kMaxWaitLoops) {
1931 ALOGE("%s: Timed out waiting for in-flight queue to shrink",
1932 __FUNCTION__);
1933 return TIMED_OUT;
1934 }
1935 loopCount++;
1936 }
1937 return OK;
1938 }
1939
threadLoop()1940 bool EmulatedQemuCamera3::ReadoutThread::threadLoop() {
1941 status_t res;
1942
1943 ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);
1944
1945 // First wait for a request from the in-flight queue.
1946
1947 if (mCurrentRequest.settings.isEmpty()) {
1948 Mutex::Autolock l(mLock);
1949 if (mInFlightQueue.empty()) {
1950 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
1951 if (res == TIMED_OUT) {
1952 ALOGVV("%s: ReadoutThread: Timed out waiting for request",
1953 __FUNCTION__);
1954 return true;
1955 } else if (res != NO_ERROR) {
1956 ALOGE("%s: Error waiting for capture requests: %d",
1957 __FUNCTION__, res);
1958 return false;
1959 }
1960 }
1961 mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
1962 mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
1963 mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
1964 mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
1965 mInFlightQueue.erase(mInFlightQueue.begin());
1966 mInFlightSignal.signal();
1967 mThreadActive = true;
1968 ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
1969 mCurrentRequest.frameNumber);
1970 }
1971
1972 // Then wait for it to be delivered from the sensor.
1973 ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
1974 __FUNCTION__);
1975
1976 nsecs_t captureTime;
1977 bool gotFrame =
1978 mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
1979 if (!gotFrame) {
1980 ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
1981 __FUNCTION__);
1982 return true;
1983 }
1984
1985 ALOGVV("Sensor done with readout for frame %d, captured at %lld ",
1986 mCurrentRequest.frameNumber, captureTime);
1987
1988 /*
1989 * Check if we need to JPEG encode a buffer, and send it for async
1990 * compression if so. Otherwise prepare the buffer for return.
1991 */
1992 bool needJpeg = false;
1993 HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
1994 while (buf != mCurrentRequest.buffers->end()) {
1995 bool goodBuffer = true;
1996 if (buf->stream->format == HAL_PIXEL_FORMAT_BLOB &&
1997 buf->stream->data_space != HAL_DATASPACE_DEPTH) {
1998 Mutex::Autolock jl(mJpegLock);
1999 if (mJpegWaiting) {
2000 /*
2001 * This shouldn't happen, because processCaptureRequest should
2002 * be stalling until JPEG compressor is free.
2003 */
2004 ALOGE("%s: Already processing a JPEG!", __FUNCTION__);
2005 goodBuffer = false;
2006 }
2007 if (goodBuffer) {
2008 // Compressor takes ownership of sensorBuffers here.
2009 res = mParent->mJpegCompressor->start(mCurrentRequest.sensorBuffers,
2010 this, &(mCurrentRequest.settings));
2011 goodBuffer = (res == OK);
2012 }
2013 if (goodBuffer) {
2014 needJpeg = true;
2015
2016 mJpegHalBuffer = *buf;
2017 mJpegFrameNumber = mCurrentRequest.frameNumber;
2018 mJpegWaiting = true;
2019
2020 mCurrentRequest.sensorBuffers = nullptr;
2021 buf = mCurrentRequest.buffers->erase(buf);
2022
2023 continue;
2024 }
2025 ALOGE("%s: Error compressing output buffer: %s (%d)",
2026 __FUNCTION__, strerror(-res), res);
2027 // Fallthrough for cleanup.
2028 }
2029 mParent->mGBM->unlock(*(buf->buffer));
2030
2031 buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
2032 CAMERA3_BUFFER_STATUS_ERROR;
2033 buf->acquire_fence = -1;
2034 buf->release_fence = -1;
2035
2036 ++buf;
2037 }
2038
2039 // Construct result for all completed buffers and results.
2040
2041 camera3_capture_result result;
2042
2043 if (mParent->hasCapability(BACKWARD_COMPATIBLE)) {
2044 static const uint8_t sceneFlicker =
2045 ANDROID_STATISTICS_SCENE_FLICKER_NONE;
2046 mCurrentRequest.settings.update(ANDROID_STATISTICS_SCENE_FLICKER,
2047 &sceneFlicker, 1);
2048
2049 static const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
2050 mCurrentRequest.settings.update(ANDROID_FLASH_STATE,
2051 &flashState, 1);
2052
2053 nsecs_t rollingShutterSkew = 0;
2054 mCurrentRequest.settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
2055 &rollingShutterSkew, 1);
2056
2057 float focusRange[] = { 1.0f / 5.0f, 0 }; // 5 m to infinity in focus
2058 mCurrentRequest.settings.update(ANDROID_LENS_FOCUS_RANGE, focusRange,
2059 sizeof(focusRange) / sizeof(float));
2060 }
2061
2062 mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
2063 &captureTime, 1);
2064
2065
2066 // JPEGs take a stage longer.
2067 const uint8_t pipelineDepth = needJpeg ? kMaxBufferCount : kMaxBufferCount - 1;
2068 mCurrentRequest.settings.update(ANDROID_REQUEST_PIPELINE_DEPTH,
2069 &pipelineDepth, 1);
2070
2071 result.frame_number = mCurrentRequest.frameNumber;
2072 result.result = mCurrentRequest.settings.getAndLock();
2073 result.num_output_buffers = mCurrentRequest.buffers->size();
2074 result.output_buffers = mCurrentRequest.buffers->array();
2075 result.input_buffer = nullptr;
2076 result.partial_result = 1;
2077
2078 // Go idle if queue is empty, before sending result.
2079 bool signalIdle = false;
2080 {
2081 Mutex::Autolock l(mLock);
2082 if (mInFlightQueue.empty()) {
2083 mThreadActive = false;
2084 signalIdle = true;
2085 }
2086 }
2087 if (signalIdle) mParent->signalReadoutIdle();
2088
2089 // Send it off to the framework.
2090 ALOGVV("%s: ReadoutThread: Send result to framework",
2091 __FUNCTION__);
2092 mParent->sendCaptureResult(&result);
2093
2094 // Clean up.
2095 mCurrentRequest.settings.unlock(result.result);
2096
2097 delete mCurrentRequest.buffers;
2098 mCurrentRequest.buffers = nullptr;
2099 if (!needJpeg) {
2100 delete mCurrentRequest.sensorBuffers;
2101 mCurrentRequest.sensorBuffers = nullptr;
2102 }
2103 mCurrentRequest.settings.clear();
2104
2105 return true;
2106 }
2107
onJpegDone(const StreamBuffer & jpegBuffer,bool success)2108 void EmulatedQemuCamera3::ReadoutThread::onJpegDone(
2109 const StreamBuffer &jpegBuffer, bool success) {
2110 Mutex::Autolock jl(mJpegLock);
2111
2112 mParent->mGBM->unlock(*(jpegBuffer.buffer));
2113
2114 mJpegHalBuffer.status = success ?
2115 CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
2116 mJpegHalBuffer.acquire_fence = -1;
2117 mJpegHalBuffer.release_fence = -1;
2118 mJpegWaiting = false;
2119
2120 camera3_capture_result result;
2121
2122 result.frame_number = mJpegFrameNumber;
2123 result.result = nullptr;
2124 result.num_output_buffers = 1;
2125 result.output_buffers = &mJpegHalBuffer;
2126 result.input_buffer = nullptr;
2127 result.partial_result = 0;
2128
2129 if (!success) {
2130 ALOGE("%s: Compression failure, returning error state buffer to"
2131 " framework", __FUNCTION__);
2132 } else {
2133 ALOGV("%s: Compression complete, returning buffer to framework",
2134 __FUNCTION__);
2135 }
2136
2137 mParent->sendCaptureResult(&result);
2138 }
2139
onJpegInputDone(const StreamBuffer & inputBuffer)2140 void EmulatedQemuCamera3::ReadoutThread::onJpegInputDone(
2141 const StreamBuffer &inputBuffer) {
2142 /*
2143 * Should never get here, since the input buffer has to be returned by end
2144 * of processCaptureRequest.
2145 */
2146 ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
2147 }
2148
2149 }; // end of namespace android
2150