1 /*
2 * Copyright (C) 2017 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 /*
18 * Contains implementation of a class EmulatedQemuCamera3 that encapsulates
19 * functionality of an advanced fake camera.
20 */
21
22 // Uncomment LOG_NDEBUG to enable verbose logging, and uncomment both LOG_NDEBUG
23 // *and* LOG_NNDEBUG to enable very verbose logging.
24
25 //#define LOG_NDEBUG 0
26 //#define LOG_NNDEBUG 0
27
28 #define LOG_TAG "EmulatedCamera_QemuCamera3"
29
30 #if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
31 #define ALOGVV ALOGV
32 #else
33 #define ALOGVV(...) ((void)0)
34 #endif
35
36 #include "EmulatedCameraFactory.h"
37 #include "GrallocModule.h"
38 #include "EmulatedQemuCamera3.h"
39
40 #include <cmath>
41 #include <cutils/properties.h>
42 #include <inttypes.h>
43 #include <sstream>
44 #include <ui/Fence.h>
45 #include <utils/Log.h>
46 #include <vector>
47
48 namespace android {
49
50 /*
51 * Constants for Camera Capabilities
52 */
53
// Time-unit multipliers expressed in nanoseconds (the base unit of nsecs_t):
// USEC = ns per microsecond, MSEC = ns per millisecond, SEC = ns per second.
const int64_t USEC = 1000LL;
const int64_t MSEC = USEC * 1000LL;
const int64_t SEC = MSEC * 1000LL;

// Pixel formats accepted by configureStreams(); anything else is rejected.
const int32_t EmulatedQemuCamera3::kAvailableFormats[] = {
    HAL_PIXEL_FORMAT_BLOB,
    HAL_PIXEL_FORMAT_RGBA_8888,
    HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
    // These are handled by YCbCr_420_888
    //     HAL_PIXEL_FORMAT_YV12,
    //     HAL_PIXEL_FORMAT_YCrCb_420_SP,
    HAL_PIXEL_FORMAT_YCbCr_420_888
};

/**
 * 3A constants
 */

// Default exposure and gain targets for different scenarios
const nsecs_t EmulatedQemuCamera3::kNormalExposureTime = 10 * MSEC;
const nsecs_t EmulatedQemuCamera3::kFacePriorityExposureTime = 30 * MSEC;
const int EmulatedQemuCamera3::kNormalSensitivity = 100;
const int EmulatedQemuCamera3::kFacePrioritySensitivity = 400;
// CTS requires 8 frames timeout in waitForAeStable
const float EmulatedQemuCamera3::kExposureTrackRate = 0.2;
const int EmulatedQemuCamera3::kPrecaptureMinFrames = 10;
const int EmulatedQemuCamera3::kStableAeMaxFrames = 100;
const float EmulatedQemuCamera3::kExposureWanderMin = -2;
const float EmulatedQemuCamera3::kExposureWanderMax = 1;
83
84 /*****************************************************************************
85 * Constructor/Destructor
86 ****************************************************************************/
87
EmulatedQemuCamera3(int cameraId,struct hw_module_t * module)88 EmulatedQemuCamera3::EmulatedQemuCamera3(int cameraId, struct hw_module_t* module) :
89 EmulatedCamera3(cameraId, module) {
90 ALOGI("Constructing emulated qemu camera 3: ID %d", mCameraID);
91
92 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; ++i) {
93 mDefaultTemplates[i] = nullptr;
94 }
95 }
96
~EmulatedQemuCamera3()97 EmulatedQemuCamera3::~EmulatedQemuCamera3() {
98 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; ++i) {
99 if (mDefaultTemplates[i] != nullptr) {
100 free_camera_metadata(mDefaultTemplates[i]);
101 }
102 }
103 delete[] mDeviceName;
104 }
105
106 /*****************************************************************************
107 * Public Methods
108 ****************************************************************************/
109
110 /*
111 * Camera Device Lifecycle Methods
112 */
113
parseResolutions(const char * frameDims)114 void EmulatedQemuCamera3::parseResolutions(const char *frameDims) {
115 const size_t kMaxFrameDimsLength = 512;
116 size_t frameDimsLength = strnlen(frameDims, kMaxFrameDimsLength);
117 if (frameDimsLength == kMaxFrameDimsLength) {
118 ALOGE("%s: Frame dimensions string was too long (>= %d)",
119 __FUNCTION__, frameDimsLength);
120 return;
121 } else if (frameDimsLength == 0) {
122 ALOGE("%s: Frame dimensions string was NULL or zero-length",
123 __FUNCTION__);
124 return;
125 }
126 std::stringstream ss(frameDims);
127 std::string input;
128 while (std::getline(ss, input, ',')) {
129 int width = 0;
130 int height = 0;
131 char none = 0;
132 /*
133 * Expect only two results because that means there was nothing after
134 * the height, we don't want any trailing characters. Otherwise, we just
135 * ignore this entry.
136 */
137 if (sscanf(input.c_str(), "%dx%d%c", &width, &height, &none) == 2) {
138 mResolutions.push_back(std::pair<int32_t,int32_t>(width, height));
139 ALOGE("%s: %dx%d", __FUNCTION__, width, height);
140 }
141 }
142
143 /*
144 * We assume the sensor size of the webcam is the resolution with the
145 * largest area. Any resolution with a dimension that exceeds the sensor
146 * size will be rejected, so Camera API calls will start failing. To work
147 * around this, we remove any resolutions with at least one dimension
148 * exceeding that of the max area resolution.
149 */
150
151 // Find the resolution with the maximum area and use that as the sensor
152 // size.
153 int maxArea = 0;
154 for (const auto &res : mResolutions) {
155 int area = res.first * res.second;
156 if (area > maxArea) {
157 maxArea = area;
158 mSensorWidth = res.first;
159 mSensorHeight = res.second;
160 }
161 }
162
163 // Remove any resolution with a dimension exceeding the sensor size.
164 for (auto res = mResolutions.begin(); res != mResolutions.end(); ) {
165 if (res->first > mSensorWidth || res->second > mSensorHeight) {
166 // Width and/or height larger than sensor. Remove it.
167 res = mResolutions.erase(res);
168 } else {
169 ++res;
170 }
171 }
172
173 if (mResolutions.empty()) {
174 ALOGE("%s: Qemu camera has no valid resolutions", __FUNCTION__);
175 }
176 }
177
Initialize(const char * deviceName,const char * frameDims,const char * facingDir)178 status_t EmulatedQemuCamera3::Initialize(const char *deviceName,
179 const char *frameDims,
180 const char *facingDir) {
181 if (mStatus != STATUS_ERROR) {
182 ALOGE("%s: Already initialized!", __FUNCTION__);
183 return INVALID_OPERATION;
184 }
185
186 /*
187 * Save parameters for later.
188 */
189 mDeviceName = deviceName;
190 parseResolutions(frameDims);
191 if (strcmp("back", facingDir) == 0) {
192 mFacingBack = true;
193 } else {
194 mFacingBack = false;
195 }
196 // We no longer need these two strings.
197 delete[] frameDims;
198 delete[] facingDir;
199
200 status_t res = getCameraCapabilities();
201 if (res != OK) {
202 ALOGE("%s: Unable to get camera capabilities: %s (%d)",
203 __FUNCTION__, strerror(-res), res);
204 return res;
205 }
206
207 res = constructStaticInfo();
208 if (res != OK) {
209 ALOGE("%s: Unable to allocate static info: %s (%d)",
210 __FUNCTION__, strerror(-res), res);
211 return res;
212 }
213
214 return EmulatedCamera3::Initialize();
215 }
216
connectCamera(hw_device_t ** device)217 status_t EmulatedQemuCamera3::connectCamera(hw_device_t** device) {
218 Mutex::Autolock l(mLock);
219 status_t res;
220
221 if (mStatus != STATUS_CLOSED) {
222 ALOGE("%s: Can't connect in state %d", __FUNCTION__, mStatus);
223 return INVALID_OPERATION;
224 }
225
226 /*
227 * Initialize sensor.
228 */
229 mSensor = new QemuSensor(mDeviceName, mSensorWidth, mSensorHeight);
230 mSensor->setQemuSensorListener(this);
231 res = mSensor->startUp();
232 if (res != NO_ERROR) {
233 return res;
234 }
235
236 mReadoutThread = new ReadoutThread(this);
237 mJpegCompressor = new JpegCompressor();
238
239 res = mReadoutThread->run("EmuCam3::readoutThread");
240 if (res != NO_ERROR) return res;
241
242 return EmulatedCamera3::connectCamera(device);
243 }
244
/*
 * Closes the camera: shuts down the sensor, stops the readout thread, and
 * tears down per-stream private data.
 *
 * Note the locking dance: mLock is deliberately released before joining the
 * readout thread (the thread may need mLock to finish), then re-acquired to
 * clean up stream state. Do not merge the two locked sections.
 */
status_t EmulatedQemuCamera3::closeCamera() {
    status_t res;
    {
        Mutex::Autolock l(mLock);
        if (mStatus == STATUS_CLOSED) return OK;

        res = mSensor->shutDown();
        if (res != NO_ERROR) {
            ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
            return res;
        }
        mSensor.clear();

        // Ask the readout thread to stop; the actual join happens below,
        // outside the lock.
        mReadoutThread->requestExit();
    }

    mReadoutThread->join();

    {
        Mutex::Autolock l(mLock);
        // Clear out private stream information.
        for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
            PrivateStreamInfo *privStream =
                    static_cast<PrivateStreamInfo*>((*s)->priv);
            delete privStream;
            (*s)->priv = nullptr;
        }
        mStreams.clear();
        mReadoutThread.clear();
    }

    return EmulatedCamera3::closeCamera();
}
278
getCameraInfo(struct camera_info * info)279 status_t EmulatedQemuCamera3::getCameraInfo(struct camera_info *info) {
280 info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
281 info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation();
282 return EmulatedCamera3::getCameraInfo(info);
283 }
284
285 /*
286 * Camera3 Interface Methods
287 */
288
configureStreams(camera3_stream_configuration * streamList)289 status_t EmulatedQemuCamera3::configureStreams(
290 camera3_stream_configuration *streamList) {
291 Mutex::Autolock l(mLock);
292 ALOGV("%s: %d streams", __FUNCTION__, streamList->num_streams);
293
294 if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
295 ALOGE("%s: Cannot configure streams in state %d",
296 __FUNCTION__, mStatus);
297 return NO_INIT;
298 }
299
300 /*
301 * Sanity-check input list.
302 */
303 if (streamList == nullptr) {
304 ALOGE("%s: NULL stream configuration", __FUNCTION__);
305 return BAD_VALUE;
306 }
307 if (streamList->streams == nullptr) {
308 ALOGE("%s: NULL stream list", __FUNCTION__);
309 return BAD_VALUE;
310 }
311 if (streamList->num_streams < 1) {
312 ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
313 streamList->num_streams);
314 return BAD_VALUE;
315 }
316
317 camera3_stream_t *inputStream = nullptr;
318 for (size_t i = 0; i < streamList->num_streams; ++i) {
319 camera3_stream_t *newStream = streamList->streams[i];
320
321 if (newStream == nullptr) {
322 ALOGE("%s: Stream index %zu was NULL", __FUNCTION__, i);
323 return BAD_VALUE;
324 }
325
326 ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x",
327 __FUNCTION__, newStream, i, newStream->stream_type,
328 newStream->usage, newStream->format);
329
330 if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
331 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
332 if (inputStream != nullptr) {
333 ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
334 return BAD_VALUE;
335 }
336 inputStream = newStream;
337 }
338
339 bool validFormat = false;
340 size_t numFormats = sizeof(kAvailableFormats) /
341 sizeof(kAvailableFormats[0]);
342 for (size_t f = 0; f < numFormats; ++f) {
343 if (newStream->format == kAvailableFormats[f]) {
344 validFormat = true;
345 break;
346 }
347 }
348 if (!validFormat) {
349 ALOGE("%s: Unsupported stream format 0x%x requested",
350 __FUNCTION__, newStream->format);
351 return BAD_VALUE;
352 }
353 }
354 mInputStream = inputStream;
355
356 /*
357 * Initially mark all existing streams as not alive.
358 */
359 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
360 PrivateStreamInfo *privStream =
361 static_cast<PrivateStreamInfo*>((*s)->priv);
362 privStream->alive = false;
363 }
364
365 /*
366 * Find new streams and mark still-alive ones.
367 */
368 for (size_t i = 0; i < streamList->num_streams; ++i) {
369 camera3_stream_t *newStream = streamList->streams[i];
370 if (newStream->priv == nullptr) {
371 // New stream. Construct info.
372 PrivateStreamInfo *privStream = new PrivateStreamInfo();
373 privStream->alive = true;
374
375 newStream->max_buffers = kMaxBufferCount;
376 newStream->priv = privStream;
377 mStreams.push_back(newStream);
378 } else {
379 // Existing stream, mark as still alive.
380 PrivateStreamInfo *privStream =
381 static_cast<PrivateStreamInfo*>(newStream->priv);
382 privStream->alive = true;
383 }
384 // Always update usage and max buffers.
385 newStream->max_buffers = kMaxBufferCount;
386 switch (newStream->stream_type) {
387 case CAMERA3_STREAM_OUTPUT:
388 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
389 break;
390 case CAMERA3_STREAM_INPUT:
391 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
392 break;
393 case CAMERA3_STREAM_BIDIRECTIONAL:
394 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
395 GRALLOC_USAGE_HW_CAMERA_WRITE;
396 break;
397 }
398 // Set the buffer format, inline with gralloc implementation
399 if (newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
400 if (newStream->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) {
401 if (newStream->usage & GRALLOC_USAGE_HW_TEXTURE) {
402 newStream->format = HAL_PIXEL_FORMAT_RGBA_8888;
403 }
404 else if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
405 newStream->format = HAL_PIXEL_FORMAT_YCbCr_420_888;
406 }
407 else {
408 newStream->format = HAL_PIXEL_FORMAT_RGB_888;
409 }
410 }
411 }
412 }
413
414 /*
415 * Reap the dead streams.
416 */
417 for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
418 PrivateStreamInfo *privStream =
419 static_cast<PrivateStreamInfo*>((*s)->priv);
420 if (!privStream->alive) {
421 (*s)->priv = nullptr;
422 delete privStream;
423 s = mStreams.erase(s);
424 } else {
425 ++s;
426 }
427 }
428
429 /*
430 * Can't reuse settings across configure call.
431 */
432 mPrevSettings.clear();
433
434 return OK;
435 }
436
registerStreamBuffers(const camera3_stream_buffer_set * bufferSet)437 status_t EmulatedQemuCamera3::registerStreamBuffers(
438 const camera3_stream_buffer_set *bufferSet) {
439 Mutex::Autolock l(mLock);
440 ALOGE("%s: Should not be invoked on HAL versions >= 3.2!", __FUNCTION__);
441 return NO_INIT;
442 }
443
/*
 * Builds and caches the default request-settings metadata for the given
 * template type. Returns a pointer owned by this object (freed in the
 * destructor), or nullptr for an invalid/unsupported template. Called with
 * mLock held for the duration; the cached pointer must outlive the device,
 * which is why templates are cached rather than rebuilt.
 */
const camera_metadata_t* EmulatedQemuCamera3::constructDefaultRequestSettings(
        int type) {
    Mutex::Autolock l(mLock);

    if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
        ALOGE("%s: Unknown request settings template: %d",
                __FUNCTION__, type);
        return nullptr;
    }

    // Without BACKWARD_COMPATIBLE, only the preview template is meaningful.
    if (!hasCapability(BACKWARD_COMPATIBLE) && type != CAMERA3_TEMPLATE_PREVIEW) {
        ALOGE("%s: Template %d not supported w/o BACKWARD_COMPATIBLE capability",
                __FUNCTION__, type);
        return nullptr;
    }

    /*
     * Cache is not just an optimization - pointer returned has to live at least
     * as long as the camera device instance does.
     */
    if (mDefaultTemplates[type] != nullptr) {
        return mDefaultTemplates[type];
    }

    CameraMetadata settings;

    /* android.request */

    static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
    settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);

    static const int32_t id = 0;
    settings.update(ANDROID_REQUEST_ID, &id, 1);

    static const int32_t frameCount = 0;
    settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);

    /* android.lens */

    static const float focalLength = 5.0f;
    settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const float focusDistance = 0;
        settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);

        static const float aperture = 2.8f;
        settings.update(ANDROID_LENS_APERTURE, &aperture, 1);

        static const float filterDensity = 0;
        settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);

        static const uint8_t opticalStabilizationMode =
                ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
                &opticalStabilizationMode, 1);

        // FOCUS_RANGE set only in frame
    }

    /* android.flash */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
        settings.update(ANDROID_FLASH_MODE, &flashMode, 1);

        static const uint8_t flashPower = 10;
        settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);

        static const int64_t firingTime = 0;
        settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
    }

    /* android.scaler */
    if (hasCapability(BACKWARD_COMPATIBLE)) {
        // Default crop: the full active sensor array.
        static const int32_t cropRegion[4] = {
            0, 0, mSensorWidth, mSensorHeight
        };
        settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
    }

    /* android.jpeg */
    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t jpegQuality = 80;
        settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);

        static const int32_t thumbnailSize[2] = {
            320, 240
        };
        settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);

        static const uint8_t thumbnailQuality = 80;
        settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);

        static const double gpsCoordinates[3] = {
            0, 0, 0
        };
        settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3);

        static const uint8_t gpsProcessingMethod[32] = "None";
        settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);

        static const int64_t gpsTimestamp = 0;
        settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);

        static const int32_t jpegOrientation = 0;
        settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
    }

    /* android.stats */
    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t faceDetectMode =
                ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
        settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);

        static const uint8_t hotPixelMapMode =
                ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
        settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
    }

    /* android.control */

    // Capture intent mirrors the requested template type.
    uint8_t controlIntent = 0;
    switch (type) {
        case CAMERA3_TEMPLATE_PREVIEW:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
            break;
        case CAMERA3_TEMPLATE_STILL_CAPTURE:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
            break;
        case CAMERA3_TEMPLATE_VIDEO_RECORD:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
            break;
        case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
            break;
        case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
            break;
        case CAMERA3_TEMPLATE_MANUAL:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
            break;
        default:
            controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
            break;
    }
    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);

    // MANUAL template disables all automatic control.
    const uint8_t controlMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
            ANDROID_CONTROL_MODE_OFF :
            ANDROID_CONTROL_MODE_AUTO;
    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);

    // Video templates pin the lower FPS bound to 30 for smooth recording.
    int32_t aeTargetFpsRange[2] = {
        5, 30
    };
    if (type == CAMERA3_TEMPLATE_VIDEO_RECORD ||
        type == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT) {
        aeTargetFpsRange[0] = 30;
    }
    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
        settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

        static const uint8_t sceneMode =
                ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
        settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

        const uint8_t aeMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
                ANDROID_CONTROL_AE_MODE_OFF : ANDROID_CONTROL_AE_MODE_ON;
        settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);

        static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
        settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);

        // All-zero region means "no metering region specified"; reused for
        // AF regions below.
        static const int32_t controlRegions[5] = {
            0, 0, 0, 0, 0
        };
        settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);

        static const int32_t aeExpCompensation = 0;
        settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);


        static const uint8_t aeAntibandingMode =
                ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
        settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);

        static const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
        settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);

        const uint8_t awbMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
                ANDROID_CONTROL_AWB_MODE_OFF :
                ANDROID_CONTROL_AWB_MODE_AUTO;
        settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

        static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
        settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

        // AF mode depends on both the template and facing direction: the
        // front camera is treated as fixed-focus (AF off).
        uint8_t afMode = 0;

        if (mFacingBack) {
            switch (type) {
                case CAMERA3_TEMPLATE_PREVIEW:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    break;
                case CAMERA3_TEMPLATE_STILL_CAPTURE:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    break;
                case CAMERA3_TEMPLATE_VIDEO_RECORD:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    break;
                case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    break;
                case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    break;
                case CAMERA3_TEMPLATE_MANUAL:
                    afMode = ANDROID_CONTROL_AF_MODE_OFF;
                    break;
                default:
                    afMode = ANDROID_CONTROL_AF_MODE_AUTO;
                    break;
            }
        } else {
            afMode = ANDROID_CONTROL_AF_MODE_OFF;
        }
        settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
        settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);

        static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
        settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);

        static const uint8_t vstabMode =
                ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
                &vstabMode, 1);

        static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
        settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);

        static const uint8_t lensShadingMapMode =
                ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
        settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
                &lensShadingMapMode, 1);

        static const uint8_t aberrationMode =
                ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
                &aberrationMode, 1);

        static const int32_t testPatternMode =
                ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
        settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternMode, 1);
    }

    // Transfer ownership of the raw metadata into the cache; freed in the
    // destructor.
    mDefaultTemplates[type] = settings.release();

    return mDefaultTemplates[type];
}
707
processCaptureRequest(camera3_capture_request * request)708 status_t EmulatedQemuCamera3::processCaptureRequest(
709 camera3_capture_request *request) {
710 Mutex::Autolock l(mLock);
711 status_t res;
712
713 /* Validation */
714
715 if (mStatus < STATUS_READY) {
716 ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
717 mStatus);
718 return INVALID_OPERATION;
719 }
720
721 if (request == nullptr) {
722 ALOGE("%s: NULL request!", __FUNCTION__);
723 return BAD_VALUE;
724 }
725
726 uint32_t frameNumber = request->frame_number;
727
728 if (request->settings == nullptr && mPrevSettings.isEmpty()) {
729 ALOGE("%s: Request %d: NULL settings for first request after"
730 "configureStreams()", __FUNCTION__, frameNumber);
731 return BAD_VALUE;
732 }
733
734 if (request->input_buffer != nullptr &&
735 request->input_buffer->stream != mInputStream) {
736 ALOGE("%s: Request %d: Input buffer not from input stream!",
737 __FUNCTION__, frameNumber);
738 ALOGV("%s: Bad stream %p, expected: %p", __FUNCTION__,
739 request->input_buffer->stream, mInputStream);
740 ALOGV("%s: Bad stream type %d, expected stream type %d", __FUNCTION__,
741 request->input_buffer->stream->stream_type,
742 mInputStream ? mInputStream->stream_type : -1);
743
744 return BAD_VALUE;
745 }
746
747 if (request->num_output_buffers < 1 || request->output_buffers == nullptr) {
748 ALOGE("%s: Request %d: No output buffers provided!",
749 __FUNCTION__, frameNumber);
750 return BAD_VALUE;
751 }
752
753 /*
754 * Validate all buffers, starting with input buffer if it's given.
755 */
756
757 ssize_t idx;
758 const camera3_stream_buffer_t *b;
759 if (request->input_buffer != nullptr) {
760 idx = -1;
761 b = request->input_buffer;
762 } else {
763 idx = 0;
764 b = request->output_buffers;
765 }
766 do {
767 PrivateStreamInfo *priv =
768 static_cast<PrivateStreamInfo*>(b->stream->priv);
769 if (priv == nullptr) {
770 ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!",
771 __FUNCTION__, frameNumber, idx);
772 return BAD_VALUE;
773 }
774 if (!priv->alive) {
775 ALOGE("%s: Request %d: Buffer %zu: Dead stream!",
776 __FUNCTION__, frameNumber, idx);
777 return BAD_VALUE;
778 }
779 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
780 ALOGE("%s: Request %d: Buffer %zu: Status not OK!",
781 __FUNCTION__, frameNumber, idx);
782 return BAD_VALUE;
783 }
784 if (b->release_fence != -1) {
785 ALOGE("%s: Request %d: Buffer %zu: Has a release fence!",
786 __FUNCTION__, frameNumber, idx);
787 return BAD_VALUE;
788 }
789 if (b->buffer == nullptr) {
790 ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!",
791 __FUNCTION__, frameNumber, idx);
792 return BAD_VALUE;
793 }
794 idx++;
795 b = &(request->output_buffers[idx]);
796 } while (idx < (ssize_t)request->num_output_buffers);
797
798 // TODO: Validate settings parameters.
799
800 /*
801 * Start processing this request.
802 */
803
804 mStatus = STATUS_ACTIVE;
805
806 CameraMetadata settings;
807
808 if (request->settings == nullptr) {
809 settings.acquire(mPrevSettings);
810 } else {
811 settings = request->settings;
812 }
813
814 res = process3A(settings);
815 if (res != OK) {
816 return res;
817 }
818
819 /*
820 * Get ready for sensor config.
821 */
822 // TODO: We shouldn't need exposureTime or frameDuration for webcams.
823 nsecs_t exposureTime;
824 nsecs_t frameDuration;
825 bool needJpeg = false;
826 camera_metadata_entry_t entry;
827
828 entry = settings.find(ANDROID_SENSOR_EXPOSURE_TIME);
829 exposureTime = (entry.count > 0) ?
830 entry.data.i64[0] :
831 QemuSensor::kExposureTimeRange[0];
832
833 // Note: Camera consumers may rely on there being an exposure
834 // time set in the camera metadata.
835 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
836
837 entry = settings.find(ANDROID_SENSOR_FRAME_DURATION);
838 frameDuration = (entry.count > 0) ?
839 entry.data.i64[0] :
840 QemuSensor::kFrameDurationRange[0];
841
842 if (exposureTime > frameDuration) {
843 frameDuration = exposureTime + QemuSensor::kMinVerticalBlank;
844 settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
845 }
846
847 static const int32_t sensitivity = QemuSensor::kSensitivityRange[0];
848 settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
849
850 static const uint8_t colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
851 settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
852
853 static const float colorGains[4] = {
854 1.0f, 1.0f, 1.0f, 1.0f
855 };
856 settings.update(ANDROID_COLOR_CORRECTION_GAINS, colorGains, 4);
857
858 static const camera_metadata_rational colorTransform[9] = {
859 {1,1}, {0,1}, {0,1},
860 {0,1}, {1,1}, {0,1},
861 {0,1}, {0,1}, {1,1}
862 };
863 settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
864
865 static const camera_metadata_rational neutralColorPoint[3] = {
866 {1,1}, {1,1}, {1,1},
867 };
868 settings.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT, neutralColorPoint, 3);
869
870 Buffers *sensorBuffers = new Buffers();
871 HalBufferVector *buffers = new HalBufferVector();
872
873 sensorBuffers->setCapacity(request->num_output_buffers);
874 buffers->setCapacity(request->num_output_buffers);
875
876 /*
877 * Process all the buffers we got for output, constructing internal buffer
878 * structures for them, and lock them for writing.
879 */
880 for (size_t i = 0; i < request->num_output_buffers; ++i) {
881 const camera3_stream_buffer &srcBuf = request->output_buffers[i];
882 StreamBuffer destBuf;
883 destBuf.streamId = kGenericStreamId;
884 destBuf.width = srcBuf.stream->width;
885 destBuf.height = srcBuf.stream->height;
886 // inline with goldfish gralloc
887 if (srcBuf.stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
888 if (srcBuf.stream->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) {
889 if (srcBuf.stream->usage & GRALLOC_USAGE_HW_TEXTURE) {
890 destBuf.format = HAL_PIXEL_FORMAT_RGBA_8888;
891 }
892 else if (srcBuf.stream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
893 destBuf.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
894 }
895 else if ((srcBuf.stream->usage & GRALLOC_USAGE_HW_CAMERA_MASK)
896 == GRALLOC_USAGE_HW_CAMERA_ZSL) {
897 destBuf.format = HAL_PIXEL_FORMAT_RGB_888;
898 }
899 }
900 }
901 else {
902 destBuf.format = srcBuf.stream->format;
903 }
904
905 destBuf.stride = srcBuf.stream->width;
906 destBuf.dataSpace = srcBuf.stream->data_space;
907 destBuf.buffer = srcBuf.buffer;
908
909 if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
910 needJpeg = true;
911 }
912
913 // Wait on fence.
914 sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
915 res = bufferAcquireFence->wait(kFenceTimeoutMs);
916 if (res == TIMED_OUT) {
917 ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
918 __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
919 }
920 if (res == OK) {
921 // Lock buffer for writing.
922 if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
923 if (destBuf.format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
924 android_ycbcr ycbcr = android_ycbcr();
925 res = GrallocModule::getInstance().lock_ycbcr(
926 *(destBuf.buffer),
927 GRALLOC_USAGE_HW_CAMERA_WRITE,
928 0, 0, destBuf.width, destBuf.height,
929 &ycbcr);
930 /*
931 * This is only valid because we know that emulator's
932 * YCbCr_420_888 is really contiguous NV21 under the hood.
933 */
934 destBuf.img = static_cast<uint8_t*>(ycbcr.y);
935 } else {
936 ALOGE("Unexpected private format for flexible YUV: 0x%x",
937 destBuf.format);
938 res = INVALID_OPERATION;
939 }
940 } else {
941 res = GrallocModule::getInstance().lock(
942 *(destBuf.buffer),
943 GRALLOC_USAGE_HW_CAMERA_WRITE,
944 0, 0, destBuf.width, destBuf.height,
945 (void**)&(destBuf.img));
946
947 }
948 if (res != OK) {
949 ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer",
950 __FUNCTION__, frameNumber, i);
951 }
952 }
953
954 if (res != OK) {
955 /*
956 * Either waiting or locking failed. Unlock locked buffers and bail
957 * out.
958 */
959 for (size_t j = 0; j < i; j++) {
960 GrallocModule::getInstance().unlock(
961 *(request->output_buffers[i].buffer));
962 }
963 delete sensorBuffers;
964 delete buffers;
965 return NO_INIT;
966 }
967
968 sensorBuffers->push_back(destBuf);
969 buffers->push_back(srcBuf);
970 }
971
972 /*
973 * Wait for JPEG compressor to not be busy, if needed.
974 */
975 if (needJpeg) {
976 bool ready = mJpegCompressor->waitForDone(kJpegTimeoutNs);
977 if (!ready) {
978 ALOGE("%s: Timeout waiting for JPEG compression to complete!",
979 __FUNCTION__);
980 return NO_INIT;
981 }
982 res = mJpegCompressor->reserve();
983 if (res != OK) {
984 ALOGE("%s: Error managing JPEG compressor resources, can't "
985 "reserve it!", __FUNCTION__);
986 return NO_INIT;
987 }
988 }
989
990 /*
991 * TODO: We shouldn't need to wait for sensor readout with a webcam, because
992 * we might be wasting time.
993 */
994
995 /*
996 * Wait until the in-flight queue has room.
997 */
998 res = mReadoutThread->waitForReadout();
999 if (res != OK) {
1000 ALOGE("%s: Timeout waiting for previous requests to complete!",
1001 __FUNCTION__);
1002 return NO_INIT;
1003 }
1004
1005 /*
1006 * Wait until sensor's ready. This waits for lengthy amounts of time with
1007 * mLock held, but the interface spec is that no other calls may by done to
1008 * the HAL by the framework while process_capture_request is happening.
1009 */
1010 int syncTimeoutCount = 0;
1011 while(!mSensor->waitForVSync(kSyncWaitTimeout)) {
1012 if (mStatus == STATUS_ERROR) {
1013 return NO_INIT;
1014 }
1015 if (syncTimeoutCount == kMaxSyncTimeoutCount) {
1016 ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
1017 __FUNCTION__, frameNumber,
1018 kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
1019 return NO_INIT;
1020 }
1021 syncTimeoutCount++;
1022 }
1023
1024 /*
1025 * Configure sensor and queue up the request to the readout thread.
1026 */
1027 mSensor->setFrameDuration(frameDuration);
1028 mSensor->setDestinationBuffers(sensorBuffers);
1029 mSensor->setFrameNumber(request->frame_number);
1030
1031 ReadoutThread::Request r;
1032 r.frameNumber = request->frame_number;
1033 r.settings = settings;
1034 r.sensorBuffers = sensorBuffers;
1035 r.buffers = buffers;
1036
1037 mReadoutThread->queueCaptureRequest(r);
1038 ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);
1039
1040 // Cache the settings for next time.
1041 mPrevSettings.acquire(settings);
1042
1043 return OK;
1044 }
1045
// camera3 flush() entry point: not implemented for the emulated camera.
// Logs a warning and returns OK so the framework treats the flush as
// (trivially) complete rather than as a device error.
status_t EmulatedQemuCamera3::flush() {
    ALOGW("%s: Not implemented; ignored", __FUNCTION__);
    return OK;
}
1050
1051 /*****************************************************************************
1052 * Private Methods
1053 ****************************************************************************/
1054
getCameraCapabilities()1055 status_t EmulatedQemuCamera3::getCameraCapabilities() {
1056 const char *key = mFacingBack ? "qemu.sf.back_camera_caps" :
1057 "qemu.sf.front_camera_caps";
1058
1059 /*
1060 * Defined by 'qemu.sf.*_camera_caps' boot property: if the property doesn't
1061 * exist, it is assumed to list FULL.
1062 */
1063 char prop[PROPERTY_VALUE_MAX];
1064 if (property_get(key, prop, nullptr) > 0) {
1065 char *saveptr = nullptr;
1066 char *cap = strtok_r(prop, " ,", &saveptr);
1067 while (cap != nullptr) {
1068 for (int i = 0; i < NUM_CAPABILITIES; ++i) {
1069 if (!strcasecmp(cap, sAvailableCapabilitiesStrings[i])) {
1070 mCapabilities.add(static_cast<AvailableCapabilities>(i));
1071 break;
1072 }
1073 }
1074 cap = strtok_r(nullptr, " ,", &saveptr);
1075 }
1076 if (mCapabilities.size() == 0) {
1077 ALOGE("qemu.sf.back_camera_caps had no valid capabilities: %s", prop);
1078 }
1079 }
1080
1081 mCapabilities.add(BACKWARD_COMPATIBLE);
1082
1083 ALOGI("Camera %d capabilities:", mCameraID);
1084 for (size_t i = 0; i < mCapabilities.size(); ++i) {
1085 ALOGI(" %s", sAvailableCapabilitiesStrings[mCapabilities[i]]);
1086 }
1087
1088 return OK;
1089 }
1090
hasCapability(AvailableCapabilities cap)1091 bool EmulatedQemuCamera3::hasCapability(AvailableCapabilities cap) {
1092 ssize_t idx = mCapabilities.indexOf(cap);
1093 return idx >= 0;
1094 }
1095
constructStaticInfo()1096 status_t EmulatedQemuCamera3::constructStaticInfo() {
1097 CameraMetadata info;
1098 Vector<int32_t> availableCharacteristicsKeys;
1099 status_t res;
1100
1101 #define ADD_STATIC_ENTRY(name, varptr, count) \
1102 availableCharacteristicsKeys.add(name); \
1103 res = info.update(name, varptr, count); \
1104 if (res != OK) return res
1105
1106 static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
1107 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1108 sensorPhysicalSize, 2);
1109
1110 const int32_t pixelArray[] = {mSensorWidth, mSensorHeight};
1111 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1112 pixelArray, 2);
1113 const int32_t activeArray[] = {0, 0, mSensorWidth, mSensorHeight};
1114 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1115 activeArray, 4);
1116
1117 static const int32_t orientation = 90; // Aligned with 'long edge'.
1118 ADD_STATIC_ENTRY(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1119
1120 static const uint8_t timestampSource =
1121 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
1122 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, ×tampSource, 1);
1123
1124 if (hasCapability(BACKWARD_COMPATIBLE)) {
1125 static const int32_t availableTestPatternModes[] = {
1126 ANDROID_SENSOR_TEST_PATTERN_MODE_OFF
1127 };
1128 ADD_STATIC_ENTRY(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
1129 availableTestPatternModes,
1130 sizeof(availableTestPatternModes) / sizeof(int32_t));
1131 }
1132
1133 /* android.lens */
1134
1135 static const float focalLength = 3.30f; // mm
1136 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1137 &focalLength, 1);
1138
1139 if (hasCapability(BACKWARD_COMPATIBLE)) {
1140 // infinity (fixed focus)
1141 const float minFocusDistance = 0.0;
1142 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1143 &minFocusDistance, 1);
1144
1145 // (fixed focus)
1146 const float hyperFocalDistance = 0.0;
1147 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1148 &minFocusDistance, 1);
1149
1150 static const float aperture = 2.8f;
1151 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1152 &aperture, 1);
1153 static const float filterDensity = 0;
1154 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1155 &filterDensity, 1);
1156 static const uint8_t availableOpticalStabilization =
1157 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
1158 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1159 &availableOpticalStabilization, 1);
1160
1161 static const int32_t lensShadingMapSize[] = {1, 1};
1162 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
1163 sizeof(lensShadingMapSize) / sizeof(int32_t));
1164
1165 static const uint8_t lensFocusCalibration =
1166 ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE;
1167 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
1168 &lensFocusCalibration, 1);
1169 }
1170
1171 static const uint8_t lensFacing = mFacingBack ?
1172 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1173 ADD_STATIC_ENTRY(ANDROID_LENS_FACING, &lensFacing, 1);
1174
1175 /* android.flash */
1176
1177 static const uint8_t flashAvailable = 0;
1178 ADD_STATIC_ENTRY(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
1179
1180 /* android.scaler */
1181
1182 std::vector<int32_t> availableStreamConfigurations;
1183 std::vector<int64_t> availableMinFrameDurations;
1184 std::vector<int64_t> availableStallDurations;
1185
1186 /*
1187 * Build stream configurations, min frame durations, and stall durations for
1188 * all resolutions reported by camera device.
1189 */
1190 for (const auto &res : mResolutions) {
1191 int32_t width = res.first, height = res.second;
1192 std::vector<int32_t> currentResStreamConfigurations = {
1193 HAL_PIXEL_FORMAT_BLOB, width, height,
1194 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1195
1196 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height,
1197 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1198
1199 HAL_PIXEL_FORMAT_YCbCr_420_888, width, height,
1200 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1201
1202 HAL_PIXEL_FORMAT_RGBA_8888, width, height,
1203 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
1204 };
1205 std::vector<int32_t> currentResMinFrameDurations = {
1206 HAL_PIXEL_FORMAT_BLOB, width, height,
1207 QemuSensor::kFrameDurationRange[0],
1208
1209 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height,
1210 QemuSensor::kFrameDurationRange[0],
1211
1212 HAL_PIXEL_FORMAT_YCbCr_420_888, width, height,
1213 QemuSensor::kFrameDurationRange[0],
1214
1215 HAL_PIXEL_FORMAT_RGBA_8888, width, height,
1216 QemuSensor::kFrameDurationRange[0]
1217 };
1218 std::vector<int32_t> currentResStallDurations = {
1219 // We should only introduce stall times with JPEG-compressed frames.
1220 HAL_PIXEL_FORMAT_BLOB, width, height,
1221 QemuSensor::kFrameDurationRange[0],
1222
1223 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height, 0,
1224
1225 HAL_PIXEL_FORMAT_YCbCr_420_888, width, height, 0,
1226
1227 HAL_PIXEL_FORMAT_RGBA_8888, width, height, 0
1228 };
1229 availableStreamConfigurations.insert(
1230 availableStreamConfigurations.end(),
1231 currentResStreamConfigurations.begin(),
1232 currentResStreamConfigurations.end());
1233 availableMinFrameDurations.insert(
1234 availableMinFrameDurations.end(),
1235 currentResMinFrameDurations.begin(),
1236 currentResMinFrameDurations.end());
1237 availableStallDurations.insert(
1238 availableStallDurations.end(),
1239 currentResStallDurations.begin(),
1240 currentResStallDurations.end());
1241 }
1242
1243 /*
1244 * Now, if nonempty, add them to the camera's available characteristics.
1245 */
1246 if (availableStreamConfigurations.size() > 0) {
1247 ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1248 availableStreamConfigurations.data(),
1249 availableStreamConfigurations.size());
1250 }
1251 if (availableMinFrameDurations.size() > 0) {
1252 ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
1253 &availableMinFrameDurations[0],
1254 availableMinFrameDurations.size());
1255 }
1256 if (availableStallDurations.size() > 0) {
1257 ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
1258 &availableStallDurations[0],
1259 availableStallDurations.size());
1260 }
1261
1262 if (hasCapability(BACKWARD_COMPATIBLE)) {
1263 static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
1264 ADD_STATIC_ENTRY(ANDROID_SCALER_CROPPING_TYPE,
1265 &croppingType, 1);
1266
1267 static const float maxZoom = 10;
1268 ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1269 &maxZoom, 1);
1270 }
1271
1272 /* android.jpeg */
1273
1274 if (hasCapability(BACKWARD_COMPATIBLE)) {
1275 static const int32_t jpegThumbnailSizes[] = {
1276 0, 0,
1277 160, 120,
1278 320, 240
1279 };
1280 ADD_STATIC_ENTRY(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1281 jpegThumbnailSizes,
1282 sizeof(jpegThumbnailSizes) / sizeof(int32_t));
1283
1284 static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
1285 ADD_STATIC_ENTRY(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
1286 }
1287
1288 /* android.stats */
1289
1290 if (hasCapability(BACKWARD_COMPATIBLE)) {
1291 static const uint8_t availableFaceDetectModes[] = {
1292 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF
1293 };
1294 ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1295 availableFaceDetectModes,
1296 sizeof(availableFaceDetectModes));
1297
1298 static const int32_t maxFaceCount = 0;
1299 ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1300 &maxFaceCount, 1);
1301
1302 static const uint8_t availableShadingMapModes[] = {
1303 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF
1304 };
1305 ADD_STATIC_ENTRY(
1306 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
1307 availableShadingMapModes, sizeof(availableShadingMapModes));
1308 }
1309
1310 /* android.sync */
1311
1312 static const int32_t maxLatency =
1313 hasCapability(FULL_LEVEL) ?
1314 ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL : 3;
1315 ADD_STATIC_ENTRY(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);
1316
1317 /* android.control */
1318
1319 if (hasCapability(BACKWARD_COMPATIBLE)) {
1320 static const uint8_t availableControlModes[] = {
1321 ANDROID_CONTROL_MODE_OFF,
1322 ANDROID_CONTROL_MODE_AUTO,
1323 ANDROID_CONTROL_MODE_USE_SCENE_MODE
1324 };
1325 ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
1326 availableControlModes, sizeof(availableControlModes));
1327 } else {
1328 static const uint8_t availableControlModes[] = {
1329 ANDROID_CONTROL_MODE_AUTO
1330 };
1331 ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
1332 availableControlModes, sizeof(availableControlModes));
1333 }
1334
1335 static const uint8_t availableSceneModes[] = {
1336 hasCapability(BACKWARD_COMPATIBLE) ?
1337 ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY :
1338 ANDROID_CONTROL_SCENE_MODE_DISABLED
1339 };
1340 ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1341 availableSceneModes, sizeof(availableSceneModes));
1342
1343 if (hasCapability(BACKWARD_COMPATIBLE)) {
1344 static const uint8_t availableEffects[] = {
1345 ANDROID_CONTROL_EFFECT_MODE_OFF
1346 };
1347 ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1348 availableEffects, sizeof(availableEffects));
1349 }
1350
1351 if (hasCapability(BACKWARD_COMPATIBLE)) {
1352 static const int32_t max3aRegions[] = {
1353 /* AE */ 1,
1354 /* AWB */ 0,
1355 /* AF */ 1
1356 };
1357 ADD_STATIC_ENTRY(ANDROID_CONTROL_MAX_REGIONS,
1358 max3aRegions,
1359 sizeof(max3aRegions) / sizeof(max3aRegions[0]));
1360
1361 static const uint8_t availableAeModes[] = {
1362 ANDROID_CONTROL_AE_MODE_OFF,
1363 ANDROID_CONTROL_AE_MODE_ON
1364 };
1365 ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_MODES,
1366 availableAeModes, sizeof(availableAeModes));
1367
1368 static const camera_metadata_rational exposureCompensationStep = {1, 3};
1369 ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1370 &exposureCompensationStep, 1);
1371
1372 int32_t exposureCompensationRange[] = {-9, 9};
1373 ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1374 exposureCompensationRange,
1375 sizeof(exposureCompensationRange) / sizeof(int32_t));
1376 }
1377
1378 static const int32_t availableTargetFpsRanges[] = {
1379 5, 30, 15, 30, 15, 15, 30, 30
1380 };
1381 ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1382 availableTargetFpsRanges,
1383 sizeof(availableTargetFpsRanges) / sizeof(int32_t));
1384
1385 if (hasCapability(BACKWARD_COMPATIBLE)) {
1386 static const uint8_t availableAntibandingModes[] = {
1387 ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
1388 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO
1389 };
1390 ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1391 availableAntibandingModes, sizeof(availableAntibandingModes));
1392 }
1393
1394 static const uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
1395
1396 ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
1397 &aeLockAvailable, 1);
1398
1399 if (hasCapability(BACKWARD_COMPATIBLE)) {
1400 static const uint8_t availableAwbModes[] = {
1401 ANDROID_CONTROL_AWB_MODE_OFF,
1402 ANDROID_CONTROL_AWB_MODE_AUTO,
1403 ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
1404 ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
1405 ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
1406 ANDROID_CONTROL_AWB_MODE_SHADE,
1407 };
1408 ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1409 availableAwbModes, sizeof(availableAwbModes));
1410 }
1411
1412 static const uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
1413
1414 ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
1415 &awbLockAvailable, 1);
1416
1417 static const uint8_t availableAfModesBack[] = {
1418 ANDROID_CONTROL_AF_MODE_OFF
1419 };
1420
1421 static const uint8_t availableAfModesFront[] = {
1422 ANDROID_CONTROL_AF_MODE_OFF
1423 };
1424
1425 if (mFacingBack && hasCapability(BACKWARD_COMPATIBLE)) {
1426 ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1427 availableAfModesBack, sizeof(availableAfModesBack));
1428 } else {
1429 ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1430 availableAfModesFront, sizeof(availableAfModesFront));
1431 }
1432
1433 static const uint8_t availableVstabModes[] = {
1434 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF,
1435 };
1436 ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1437 availableVstabModes, sizeof(availableVstabModes));
1438
1439 /* android.colorCorrection */
1440
1441 if (hasCapability(BACKWARD_COMPATIBLE)) {
1442 static const uint8_t availableAberrationModes[] = {
1443 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
1444 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
1445 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY
1446 };
1447 ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
1448 availableAberrationModes, sizeof(availableAberrationModes));
1449 } else {
1450 static const uint8_t availableAberrationModes[] = {
1451 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
1452 };
1453 ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
1454 availableAberrationModes, sizeof(availableAberrationModes));
1455 }
1456
1457 /* android.edge */
1458
1459 if (hasCapability(BACKWARD_COMPATIBLE)) {
1460 static const uint8_t availableEdgeModes[] = {
1461 ANDROID_EDGE_MODE_OFF,
1462 ANDROID_EDGE_MODE_FAST,
1463 ANDROID_EDGE_MODE_HIGH_QUALITY,
1464 };
1465 ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
1466 availableEdgeModes, sizeof(availableEdgeModes));
1467 } else {
1468 static const uint8_t availableEdgeModes[] = {
1469 ANDROID_EDGE_MODE_OFF
1470 };
1471 ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
1472 availableEdgeModes, sizeof(availableEdgeModes));
1473 }
1474
1475 /* android.info */
1476
1477 static const uint8_t supportedHardwareLevel =
1478 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
1479 ADD_STATIC_ENTRY(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
1480 &supportedHardwareLevel, /* count */ 1);
1481
1482 /* android.noiseReduction */
1483
1484 if (hasCapability(BACKWARD_COMPATIBLE)) {
1485 static const uint8_t availableNoiseReductionModes[] = {
1486 ANDROID_NOISE_REDUCTION_MODE_OFF,
1487 ANDROID_NOISE_REDUCTION_MODE_FAST,
1488 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY
1489 };
1490 ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
1491 availableNoiseReductionModes,
1492 sizeof(availableNoiseReductionModes));
1493 } else {
1494 static const uint8_t availableNoiseReductionModes[] = {
1495 ANDROID_NOISE_REDUCTION_MODE_OFF
1496 };
1497 ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
1498 availableNoiseReductionModes,
1499 sizeof(availableNoiseReductionModes));
1500 }
1501
1502 /* android.shading */
1503
1504 if (hasCapability(BACKWARD_COMPATIBLE)) {
1505 static const uint8_t availableShadingModes[] = {
1506 ANDROID_SHADING_MODE_OFF,
1507 ANDROID_SHADING_MODE_FAST,
1508 ANDROID_SHADING_MODE_HIGH_QUALITY
1509 };
1510 ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
1511 sizeof(availableShadingModes));
1512 } else {
1513 static const uint8_t availableShadingModes[] = {
1514 ANDROID_SHADING_MODE_OFF
1515 };
1516 ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
1517 sizeof(availableShadingModes));
1518 }
1519
1520 /* android.request */
1521
1522 static const int32_t maxNumOutputStreams[] = {
1523 kMaxRawStreamCount, kMaxProcessedStreamCount, kMaxJpegStreamCount
1524 };
1525 ADD_STATIC_ENTRY(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
1526 maxNumOutputStreams, 3);
1527
1528 static const uint8_t maxPipelineDepth = kMaxBufferCount;
1529 ADD_STATIC_ENTRY(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &maxPipelineDepth, 1);
1530
1531 static const int32_t partialResultCount = 1;
1532 ADD_STATIC_ENTRY(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
1533 &partialResultCount, /* count */ 1);
1534
1535 SortedVector<uint8_t> caps;
1536 for (size_t i = 0; i < mCapabilities.size(); ++i) {
1537 switch (mCapabilities[i]) {
1538 case BACKWARD_COMPATIBLE:
1539 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
1540 break;
1541 case PRIVATE_REPROCESSING:
1542 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
1543 break;
1544 case READ_SENSOR_SETTINGS:
1545 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
1546 break;
1547 case BURST_CAPTURE:
1548 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
1549 break;
1550 case YUV_REPROCESSING:
1551 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
1552 break;
1553 case CONSTRAINED_HIGH_SPEED_VIDEO:
1554 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
1555 break;
1556 default:
1557 // Ignore LEVELs.
1558 break;
1559 }
1560 }
1561 ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, caps.array(), caps.size());
1562
1563 // Scan a default request template for included request keys.
1564 Vector<int32_t> availableRequestKeys;
1565 const camera_metadata_t *previewRequest =
1566 constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
1567 for (size_t i = 0; i < get_camera_metadata_entry_count(previewRequest); ++i) {
1568 camera_metadata_ro_entry_t entry;
1569 get_camera_metadata_ro_entry(previewRequest, i, &entry);
1570 availableRequestKeys.add(entry.tag);
1571 }
1572 ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys.array(),
1573 availableRequestKeys.size());
1574
1575 /*
1576 * Add a few more result keys. Must be kept up to date with the various
1577 * places that add these.
1578 */
1579
1580 Vector<int32_t> availableResultKeys(availableRequestKeys);
1581 if (hasCapability(BACKWARD_COMPATIBLE)) {
1582 availableResultKeys.add(ANDROID_CONTROL_AE_STATE);
1583 availableResultKeys.add(ANDROID_CONTROL_AF_STATE);
1584 availableResultKeys.add(ANDROID_CONTROL_AWB_STATE);
1585 availableResultKeys.add(ANDROID_FLASH_STATE);
1586 availableResultKeys.add(ANDROID_LENS_STATE);
1587 availableResultKeys.add(ANDROID_LENS_FOCUS_RANGE);
1588 availableResultKeys.add(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
1589 availableResultKeys.add(ANDROID_STATISTICS_SCENE_FLICKER);
1590 }
1591
1592 availableResultKeys.add(ANDROID_REQUEST_PIPELINE_DEPTH);
1593 availableResultKeys.add(ANDROID_SENSOR_TIMESTAMP);
1594
1595 ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys.array(),
1596 availableResultKeys.size());
1597
1598 // Needs to be last, to collect all the keys set.
1599
1600 availableCharacteristicsKeys.add(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
1601 info.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
1602 availableCharacteristicsKeys);
1603
1604 mCameraInfo = info.release();
1605
1606 #undef ADD_STATIC_ENTRY
1607 return OK;
1608 }
1609
process3A(CameraMetadata & settings)1610 status_t EmulatedQemuCamera3::process3A(CameraMetadata &settings) {
1611 /**
1612 * Extract top-level 3A controls
1613 */
1614 status_t res;
1615
1616 bool facePriority = false;
1617
1618 camera_metadata_entry e;
1619
1620 e = settings.find(ANDROID_CONTROL_MODE);
1621 if (e.count == 0) {
1622 ALOGE("%s: No control mode entry!", __FUNCTION__);
1623 return BAD_VALUE;
1624 }
1625 uint8_t controlMode = e.data.u8[0];
1626
1627 if (controlMode == ANDROID_CONTROL_MODE_OFF) {
1628 mAeMode = ANDROID_CONTROL_AE_MODE_OFF;
1629 mAfMode = ANDROID_CONTROL_AF_MODE_OFF;
1630 mAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
1631 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
1632 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
1633 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
1634 update3A(settings);
1635 return OK;
1636 } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
1637 if (!hasCapability(BACKWARD_COMPATIBLE)) {
1638 ALOGE("%s: Can't use scene mode when BACKWARD_COMPATIBLE not supported!",
1639 __FUNCTION__);
1640 return BAD_VALUE;
1641 }
1642
1643 e = settings.find(ANDROID_CONTROL_SCENE_MODE);
1644 if (e.count == 0) {
1645 ALOGE("%s: No scene mode entry!", __FUNCTION__);
1646 return BAD_VALUE;
1647 }
1648 uint8_t sceneMode = e.data.u8[0];
1649
1650 switch(sceneMode) {
1651 case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
1652 mFacePriority = true;
1653 break;
1654 default:
1655 ALOGE("%s: Emulator doesn't support scene mode %d",
1656 __FUNCTION__, sceneMode);
1657 return BAD_VALUE;
1658 }
1659 } else {
1660 mFacePriority = false;
1661 }
1662
1663 // controlMode == AUTO or sceneMode = FACE_PRIORITY
1664 // Process individual 3A controls
1665
1666 res = doFakeAE(settings);
1667 if (res != OK) return res;
1668
1669 res = doFakeAF(settings);
1670 if (res != OK) return res;
1671
1672 res = doFakeAWB(settings);
1673 if (res != OK) return res;
1674
1675 update3A(settings);
1676 return OK;
1677 }
1678
// Simulated auto-exposure. During a precapture sequence, mAeCurrentExposureTime
// converges exponentially toward a target; otherwise AE reports CONVERGED
// immediately. Updates mAeMode/mAeState/mAeCounter/mAeCurrentExposureTime.
status_t EmulatedQemuCamera3::doFakeAE(CameraMetadata &settings) {
    camera_metadata_entry e;

    // AE mode is mandatory for backward-compatible devices only.
    e = settings.find(ANDROID_CONTROL_AE_MODE);
    if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
        ALOGE("%s: No AE mode entry!", __FUNCTION__);
        return BAD_VALUE;
    }
    // Default to AE_MODE_ON when the request omits the entry.
    uint8_t aeMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AE_MODE_ON;
    mAeMode = aeMode;

    switch (aeMode) {
        case ANDROID_CONTROL_AE_MODE_OFF:
            // AE is OFF
            mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
            return OK;
        case ANDROID_CONTROL_AE_MODE_ON:
            // OK for AUTO modes
            break;
        default:
            // Mostly silently ignore unsupported modes
            ALOGV("%s: Emulator doesn't support AE mode %d, assuming ON",
                    __FUNCTION__, aeMode);
            break;
    }

    // A precapture trigger starts (or restarts) the precapture sequence.
    e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
    bool precaptureTrigger = false;
    if (e.count != 0) {
        precaptureTrigger =
                (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
    }

    if (precaptureTrigger) {
        ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
    } else if (e.count > 0) {
        ALOGV("%s: Pre capture trigger was present? %zu",
                __FUNCTION__, e.count);
    }

    if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
        // Run precapture sequence
        if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
            // Entering precapture: restart the frame counter.
            mAeCounter = 0;
        }

        // Face-priority scenes use a different (shorter) target exposure.
        if (mFacePriority) {
            mAeTargetExposureTime = kFacePriorityExposureTime;
        } else {
            mAeTargetExposureTime = kNormalExposureTime;
        }

        // Converged once a minimum number of frames has elapsed AND the
        // current exposure is within 10% of the target.
        if (mAeCounter > kPrecaptureMinFrames &&
                (mAeTargetExposureTime - mAeCurrentExposureTime) <
                mAeTargetExposureTime / 10) {
            // Done with precapture
            mAeCounter = 0;
            mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
        } else {
            // Converge some more
            mAeCurrentExposureTime +=
                    (mAeTargetExposureTime - mAeCurrentExposureTime) *
                    kExposureTrackRate;
            mAeCounter++;
            mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
        }
    }
    else {
        // No precapture in progress: report converged right away.
        mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
    }

    return OK;
}
1752
doFakeAF(CameraMetadata & settings)1753 status_t EmulatedQemuCamera3::doFakeAF(CameraMetadata &settings) {
1754 camera_metadata_entry e;
1755
1756 e = settings.find(ANDROID_CONTROL_AF_MODE);
1757 if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
1758 ALOGE("%s: No AF mode entry!", __FUNCTION__);
1759 return BAD_VALUE;
1760 }
1761 uint8_t afMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AF_MODE_OFF;
1762
1763 switch (afMode) {
1764 case ANDROID_CONTROL_AF_MODE_OFF:
1765 case ANDROID_CONTROL_AF_MODE_AUTO:
1766 case ANDROID_CONTROL_AF_MODE_MACRO:
1767 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
1768 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
1769 // Always report INACTIVE for Qemu Camera
1770 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
1771 break;
1772 default:
1773 ALOGE("%s: Emulator doesn't support AF mode %d",
1774 __FUNCTION__, afMode);
1775 return BAD_VALUE;
1776 }
1777
1778 return OK;
1779 }
1780
doFakeAWB(CameraMetadata & settings)1781 status_t EmulatedQemuCamera3::doFakeAWB(CameraMetadata &settings) {
1782 camera_metadata_entry e;
1783
1784 e = settings.find(ANDROID_CONTROL_AWB_MODE);
1785 if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
1786 ALOGE("%s: No AWB mode entry!", __FUNCTION__);
1787 return BAD_VALUE;
1788 }
1789 uint8_t awbMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AWB_MODE_AUTO;
1790
1791 // TODO: Add white balance simulation
1792
1793 switch (awbMode) {
1794 case ANDROID_CONTROL_AWB_MODE_OFF:
1795 case ANDROID_CONTROL_AWB_MODE_AUTO:
1796 case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
1797 case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
1798 case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
1799 case ANDROID_CONTROL_AWB_MODE_SHADE:
1800 // Always magically right for Qemu Camera
1801 mAwbState = ANDROID_CONTROL_AWB_STATE_CONVERGED;
1802 break;
1803 default:
1804 ALOGE("%s: Emulator doesn't support AWB mode %d",
1805 __FUNCTION__, awbMode);
1806 return BAD_VALUE;
1807 }
1808
1809 return OK;
1810 }
1811
update3A(CameraMetadata & settings)1812 void EmulatedQemuCamera3::update3A(CameraMetadata &settings) {
1813 if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
1814 settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
1815 &mAeCurrentExposureTime, 1);
1816 settings.update(ANDROID_SENSOR_SENSITIVITY,
1817 &mAeCurrentSensitivity, 1);
1818 }
1819
1820 settings.update(ANDROID_CONTROL_AE_STATE,
1821 &mAeState, 1);
1822 settings.update(ANDROID_CONTROL_AF_STATE,
1823 &mAfState, 1);
1824 settings.update(ANDROID_CONTROL_AWB_STATE,
1825 &mAwbState, 1);
1826
1827 uint8_t lensState;
1828 switch (mAfState) {
1829 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1830 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
1831 lensState = ANDROID_LENS_STATE_MOVING;
1832 break;
1833 case ANDROID_CONTROL_AF_STATE_INACTIVE:
1834 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1835 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1836 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1837 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
1838 default:
1839 lensState = ANDROID_LENS_STATE_STATIONARY;
1840 break;
1841 }
1842 settings.update(ANDROID_LENS_STATE, &lensState, 1);
1843 }
1844
// Called when the readout thread drains its queue; transitions the device
// from ACTIVE back to READY under mLock if nothing new was queued meanwhile.
void EmulatedQemuCamera3::signalReadoutIdle() {
    Mutex::Autolock l(mLock);
    /*
     * Need to check isIdle again because waiting on mLock may have allowed
     * something to be placed in the in-flight queue.
     */
    if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
        ALOGV("Now idle");
        mStatus = STATUS_READY;
    }
}
1856
onQemuSensorEvent(uint32_t frameNumber,Event e,nsecs_t timestamp)1857 void EmulatedQemuCamera3::onQemuSensorEvent(uint32_t frameNumber, Event e,
1858 nsecs_t timestamp) {
1859 switch (e) {
1860 case QemuSensor::QemuSensorListener::EXPOSURE_START:
1861 ALOGVV("%s: Frame %d: Sensor started exposure at %lld",
1862 __FUNCTION__, frameNumber, timestamp);
1863 // Trigger shutter notify to framework.
1864 camera3_notify_msg_t msg;
1865 msg.type = CAMERA3_MSG_SHUTTER;
1866 msg.message.shutter.frame_number = frameNumber;
1867 msg.message.shutter.timestamp = timestamp;
1868 sendNotify(&msg);
1869 break;
1870 default:
1871 ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__,
1872 e, timestamp);
1873 break;
1874 }
1875 }
1876
// Readout thread constructor: holds a non-owning pointer back to the camera
// and starts with no JPEG compression outstanding.
EmulatedQemuCamera3::ReadoutThread::ReadoutThread(EmulatedQemuCamera3 *parent) :
        mParent(parent), mJpegWaiting(false) {
    ALOGV("%s: Creating readout thread", __FUNCTION__);
}
1881
~ReadoutThread()1882 EmulatedQemuCamera3::ReadoutThread::~ReadoutThread() {
1883 for (List<Request>::iterator i = mInFlightQueue.begin();
1884 i != mInFlightQueue.end(); ++i) {
1885 delete i->buffers;
1886 delete i->sensorBuffers;
1887 }
1888 }
1889
// Hands a configured capture request to the readout thread under mLock and
// wakes the thread, which may be blocked waiting for work.
void EmulatedQemuCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
    Mutex::Autolock l(mLock);

    mInFlightQueue.push_back(r);
    mInFlightSignal.signal();
}
1896
// True when no requests are queued and the thread isn't processing one.
bool EmulatedQemuCamera3::ReadoutThread::isIdle() {
    Mutex::Autolock l(mLock);
    return mInFlightQueue.empty() && !mThreadActive;
}
1901
waitForReadout()1902 status_t EmulatedQemuCamera3::ReadoutThread::waitForReadout() {
1903 status_t res;
1904 Mutex::Autolock l(mLock);
1905 int loopCount = 0;
1906 while (mInFlightQueue.size() >= kMaxQueueSize) {
1907 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
1908 if (res != OK && res != TIMED_OUT) {
1909 ALOGE("%s: Error waiting for in-flight queue to shrink",
1910 __FUNCTION__);
1911 return INVALID_OPERATION;
1912 }
1913 if (loopCount == kMaxWaitLoops) {
1914 ALOGE("%s: Timed out waiting for in-flight queue to shrink",
1915 __FUNCTION__);
1916 return TIMED_OUT;
1917 }
1918 loopCount++;
1919 }
1920 return OK;
1921 }
1922
threadLoop()1923 bool EmulatedQemuCamera3::ReadoutThread::threadLoop() {
1924 status_t res;
1925
1926 ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);
1927
1928 // First wait for a request from the in-flight queue.
1929
1930 if (mCurrentRequest.settings.isEmpty()) {
1931 Mutex::Autolock l(mLock);
1932 if (mInFlightQueue.empty()) {
1933 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
1934 if (res == TIMED_OUT) {
1935 ALOGVV("%s: ReadoutThread: Timed out waiting for request",
1936 __FUNCTION__);
1937 return true;
1938 } else if (res != NO_ERROR) {
1939 ALOGE("%s: Error waiting for capture requests: %d",
1940 __FUNCTION__, res);
1941 return false;
1942 }
1943 }
1944 mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
1945 mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
1946 mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
1947 mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
1948 mInFlightQueue.erase(mInFlightQueue.begin());
1949 mInFlightSignal.signal();
1950 mThreadActive = true;
1951 ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
1952 mCurrentRequest.frameNumber);
1953 }
1954
1955 // Then wait for it to be delivered from the sensor.
1956 ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
1957 __FUNCTION__);
1958
1959 nsecs_t captureTime;
1960 bool gotFrame =
1961 mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
1962 if (!gotFrame) {
1963 ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
1964 __FUNCTION__);
1965 return true;
1966 }
1967
1968 ALOGVV("Sensor done with readout for frame %d, captured at %lld ",
1969 mCurrentRequest.frameNumber, captureTime);
1970
1971 /*
1972 * Check if we need to JPEG encode a buffer, and send it for async
1973 * compression if so. Otherwise prepare the buffer for return.
1974 */
1975 bool needJpeg = false;
1976 HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
1977 while (buf != mCurrentRequest.buffers->end()) {
1978 bool goodBuffer = true;
1979 if (buf->stream->format == HAL_PIXEL_FORMAT_BLOB &&
1980 buf->stream->data_space != HAL_DATASPACE_DEPTH) {
1981 Mutex::Autolock jl(mJpegLock);
1982 if (mJpegWaiting) {
1983 /*
1984 * This shouldn't happen, because processCaptureRequest should
1985 * be stalling until JPEG compressor is free.
1986 */
1987 ALOGE("%s: Already processing a JPEG!", __FUNCTION__);
1988 goodBuffer = false;
1989 }
1990 if (goodBuffer) {
1991 // Compressor takes ownership of sensorBuffers here.
1992 res = mParent->mJpegCompressor->start(mCurrentRequest.sensorBuffers,
1993 this, &(mCurrentRequest.settings));
1994 goodBuffer = (res == OK);
1995 }
1996 if (goodBuffer) {
1997 needJpeg = true;
1998
1999 mJpegHalBuffer = *buf;
2000 mJpegFrameNumber = mCurrentRequest.frameNumber;
2001 mJpegWaiting = true;
2002
2003 mCurrentRequest.sensorBuffers = nullptr;
2004 buf = mCurrentRequest.buffers->erase(buf);
2005
2006 continue;
2007 }
2008 ALOGE("%s: Error compressing output buffer: %s (%d)",
2009 __FUNCTION__, strerror(-res), res);
2010 // Fallthrough for cleanup.
2011 }
2012 GrallocModule::getInstance().unlock(*(buf->buffer));
2013
2014 buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
2015 CAMERA3_BUFFER_STATUS_ERROR;
2016 buf->acquire_fence = -1;
2017 buf->release_fence = -1;
2018
2019 ++buf;
2020 }
2021
2022 // Construct result for all completed buffers and results.
2023
2024 camera3_capture_result result;
2025
2026 if (mParent->hasCapability(BACKWARD_COMPATIBLE)) {
2027 static const uint8_t sceneFlicker =
2028 ANDROID_STATISTICS_SCENE_FLICKER_NONE;
2029 mCurrentRequest.settings.update(ANDROID_STATISTICS_SCENE_FLICKER,
2030 &sceneFlicker, 1);
2031
2032 static const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
2033 mCurrentRequest.settings.update(ANDROID_FLASH_STATE,
2034 &flashState, 1);
2035
2036 nsecs_t rollingShutterSkew = 0;
2037 mCurrentRequest.settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
2038 &rollingShutterSkew, 1);
2039
2040 float focusRange[] = { 1.0f / 5.0f, 0 }; // 5 m to infinity in focus
2041 mCurrentRequest.settings.update(ANDROID_LENS_FOCUS_RANGE, focusRange,
2042 sizeof(focusRange) / sizeof(float));
2043 }
2044
2045 mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
2046 &captureTime, 1);
2047
2048
2049 // JPEGs take a stage longer.
2050 const uint8_t pipelineDepth = needJpeg ? kMaxBufferCount : kMaxBufferCount - 1;
2051 mCurrentRequest.settings.update(ANDROID_REQUEST_PIPELINE_DEPTH,
2052 &pipelineDepth, 1);
2053
2054 result.frame_number = mCurrentRequest.frameNumber;
2055 result.result = mCurrentRequest.settings.getAndLock();
2056 result.num_output_buffers = mCurrentRequest.buffers->size();
2057 result.output_buffers = mCurrentRequest.buffers->array();
2058 result.input_buffer = nullptr;
2059 result.partial_result = 1;
2060
2061 // Go idle if queue is empty, before sending result.
2062 bool signalIdle = false;
2063 {
2064 Mutex::Autolock l(mLock);
2065 if (mInFlightQueue.empty()) {
2066 mThreadActive = false;
2067 signalIdle = true;
2068 }
2069 }
2070 if (signalIdle) mParent->signalReadoutIdle();
2071
2072 // Send it off to the framework.
2073 ALOGVV("%s: ReadoutThread: Send result to framework",
2074 __FUNCTION__);
2075 mParent->sendCaptureResult(&result);
2076
2077 // Clean up.
2078 mCurrentRequest.settings.unlock(result.result);
2079
2080 delete mCurrentRequest.buffers;
2081 mCurrentRequest.buffers = nullptr;
2082 if (!needJpeg) {
2083 delete mCurrentRequest.sensorBuffers;
2084 mCurrentRequest.sensorBuffers = nullptr;
2085 }
2086 mCurrentRequest.settings.clear();
2087
2088 return true;
2089 }
2090
onJpegDone(const StreamBuffer & jpegBuffer,bool success)2091 void EmulatedQemuCamera3::ReadoutThread::onJpegDone(
2092 const StreamBuffer &jpegBuffer, bool success) {
2093 Mutex::Autolock jl(mJpegLock);
2094
2095 GrallocModule::getInstance().unlock(*(jpegBuffer.buffer));
2096
2097 mJpegHalBuffer.status = success ?
2098 CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
2099 mJpegHalBuffer.acquire_fence = -1;
2100 mJpegHalBuffer.release_fence = -1;
2101 mJpegWaiting = false;
2102
2103 camera3_capture_result result;
2104
2105 result.frame_number = mJpegFrameNumber;
2106 result.result = nullptr;
2107 result.num_output_buffers = 1;
2108 result.output_buffers = &mJpegHalBuffer;
2109 result.input_buffer = nullptr;
2110 result.partial_result = 0;
2111
2112 if (!success) {
2113 ALOGE("%s: Compression failure, returning error state buffer to"
2114 " framework", __FUNCTION__);
2115 } else {
2116 ALOGV("%s: Compression complete, returning buffer to framework",
2117 __FUNCTION__);
2118 }
2119
2120 mParent->sendCaptureResult(&result);
2121 }
2122
onJpegInputDone(const StreamBuffer & inputBuffer)2123 void EmulatedQemuCamera3::ReadoutThread::onJpegInputDone(
2124 const StreamBuffer &inputBuffer) {
2125 /*
2126 * Should never get here, since the input buffer has to be returned by end
2127 * of processCaptureRequest.
2128 */
2129 ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
2130 }
2131
2132 }; // end of namespace android
2133