1 /*
2  * Copyright (C) 2013 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 /*
18  * Contains implementation of a class EmulatedFakeCamera3 that encapsulates
19  * functionality of an advanced fake camera.
20  */
21 
22 #include <cstdint>
23 
24 //#define LOG_NDEBUG 0
25 //#define LOG_NNDEBUG 0
26 #define LOG_TAG "EmulatedCamera_FakeCamera3"
27 #include <cutils/properties.h>
28 #include <utils/Log.h>
29 
30 #include <ui/Fence.h>
31 #include "EmulatedCameraFactory.h"
32 #include "EmulatedFakeCamera3.h"
33 #include "GrallocModule.h"
34 
35 #include <cmath>
36 #include "fake-pipeline2/JpegCompressor.h"
37 #include "fake-pipeline2/Sensor.h"
38 
39 #include <vector>
40 
41 #if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
42 #define ALOGVV ALOGV
43 #else
44 #define ALOGVV(...) ((void)0)
45 #endif
46 
47 namespace android {
48 
49 /**
50  * Constants for camera capabilities
51  */
52 
53 const int64_t USEC = 1000LL;
54 const int64_t MSEC = USEC * 1000LL;
55 // const int64_t SEC = MSEC * 1000LL;
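// Note: these multipliers express durations in nanoseconds (the unit of
// nsecs_t), so e.g. kNormalExposureTime (10 * MSEC) below is 10 ms expressed
// in nanoseconds.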
56 
57 const int32_t EmulatedFakeCamera3::kAvailableFormats[] = {
58     HAL_PIXEL_FORMAT_RAW16, HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_RGBA_8888,
59     HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
60     // These are handled by YCbCr_420_888
61     //        HAL_PIXEL_FORMAT_YV12,
62     //        HAL_PIXEL_FORMAT_YCrCb_420_SP,
63     HAL_PIXEL_FORMAT_YCbCr_420_888, HAL_PIXEL_FORMAT_Y16};
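// configureStreams() below rejects any requested stream whose format is not
// listed here.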
64 
65 /**
66  * 3A constants
67  */
68 
69 // Default exposure and gain targets for different scenarios
70 const nsecs_t EmulatedFakeCamera3::kNormalExposureTime = 10 * MSEC;
71 const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
72 const int EmulatedFakeCamera3::kNormalSensitivity = 100;
73 const int EmulatedFakeCamera3::kFacePrioritySensitivity = 400;
74 const float EmulatedFakeCamera3::kExposureTrackRate = 0.1;
75 const int EmulatedFakeCamera3::kPrecaptureMinFrames = 10;
76 const int EmulatedFakeCamera3::kStableAeMaxFrames = 100;
77 const float EmulatedFakeCamera3::kExposureWanderMin = -2;
78 const float EmulatedFakeCamera3::kExposureWanderMax = 1;
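// These targets parameterize the fake 3A loop (process3A(), invoked from
// processCaptureRequest() below); the details of that loop are outside this
// excerpt.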
79 
80 /**
81  * Camera device lifecycle methods
82  */
83 
84 EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, bool facingBack,
85                                          struct hw_module_t *module)
86     : EmulatedCamera3(cameraId, module), mFacingBack(facingBack) {
87   ALOGI("Constructing emulated fake camera 3: ID %d, facing %s", mCameraID,
88         facingBack ? "back" : "front");
89 
90   for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
91     mDefaultTemplates[i] = NULL;
92   }
93 }
94 
95 EmulatedFakeCamera3::~EmulatedFakeCamera3() {
96   for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
97     if (mDefaultTemplates[i] != NULL) {
98       free_camera_metadata(mDefaultTemplates[i]);
99     }
100   }
101 }
102 
103 status_t EmulatedFakeCamera3::Initialize(const cvd::CameraDefinition &params) {
104   ALOGV("%s: E", __FUNCTION__);
105   status_t res;
106 
107   if (mStatus != STATUS_ERROR) {
108     ALOGE("%s: Already initialized!", __FUNCTION__);
109     return INVALID_OPERATION;
110   }
111 
112   res = getCameraCapabilities();
113   if (res != OK) {
114     ALOGE("%s: Unable to get camera capabilities: %s (%d)", __FUNCTION__,
115           strerror(-res), res);
116     return res;
117   }
118 
119   res = constructStaticInfo(params);
120   if (res != OK) {
121     ALOGE("%s: Unable to allocate static info: %s (%d)", __FUNCTION__,
122           strerror(-res), res);
123     return res;
124   }
125 
126   return EmulatedCamera3::Initialize(params);
127 }
128 
129 status_t EmulatedFakeCamera3::connectCamera(hw_device_t **device) {
130   ALOGV("%s: E", __FUNCTION__);
131   Mutex::Autolock l(mLock);
132   status_t res;
133 
134   if (mStatus != STATUS_CLOSED) {
135     ALOGE("%s: Can't connect in state %d", __FUNCTION__, mStatus);
136     return INVALID_OPERATION;
137   }
138 
139   mSensor = new Sensor(mSensorWidth, mSensorHeight);
140   mSensor->setSensorListener(this);
141 
142   res = mSensor->startUp();
143   if (res != NO_ERROR) return res;
144 
145   mReadoutThread = new ReadoutThread(this);
146   mJpegCompressor = new JpegCompressor();
147 
148   res = mReadoutThread->run("EmuCam3::readoutThread");
149   if (res != NO_ERROR) return res;
150 
151   // Initialize fake 3A
152 
153   mControlMode = ANDROID_CONTROL_MODE_AUTO;
154   mFacePriority = false;
155   mAeMode = ANDROID_CONTROL_AE_MODE_ON;
156   mAfMode = ANDROID_CONTROL_AF_MODE_AUTO;
157   mAwbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
158   mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
159   mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
160   mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
161   mAeCounter = 0;
162   mAeTargetExposureTime = kNormalExposureTime;
163   mAeCurrentExposureTime = kNormalExposureTime;
164   mAeCurrentSensitivity = kNormalSensitivity;
165 
166   return EmulatedCamera3::connectCamera(device);
167 }
168 
169 status_t EmulatedFakeCamera3::closeCamera() {
170   ALOGV("%s: E", __FUNCTION__);
171   status_t res;
172   {
173     Mutex::Autolock l(mLock);
174     if (mStatus == STATUS_CLOSED) return OK;
175 
176     res = mSensor->shutDown();
177     if (res != NO_ERROR) {
178       ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
179       return res;
180     }
181     mSensor.clear();
182 
183     mReadoutThread->requestExit();
184   }
185 
186   mReadoutThread->join();
187 
188   {
189     Mutex::Autolock l(mLock);
190     // Clear out private stream information
191     for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
192       PrivateStreamInfo *privStream =
193           static_cast<PrivateStreamInfo *>((*s)->priv);
194       delete privStream;
195       (*s)->priv = NULL;
196     }
197     mStreams.clear();
198     mReadoutThread.clear();
199   }
200 
201   return EmulatedCamera3::closeCamera();
202 }
203 
204 status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) {
205   info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
206   info->orientation =
207       EmulatedCameraFactory::Instance().getFakeCameraOrientation();
208 #if VSOC_PLATFORM_SDK_AFTER(L_MR1)
209   info->resource_cost = 100;
210   info->conflicting_devices = NULL;
211   info->conflicting_devices_length = 0;
212 #endif
213   return EmulatedCamera3::getCameraInfo(info);
214 }
215 
216 status_t EmulatedFakeCamera3::setTorchMode(bool enabled) {
217   if (!mFacingBack) {
218     ALOGE("%s: Front camera does not have flash unit", __FUNCTION__);
219     return INVALID_OPERATION;
220   }
221   EmulatedCameraFactory::Instance().onTorchModeStatusChanged(
222       mCameraID, enabled ? TORCH_MODE_STATUS_AVAILABLE_ON
223                          : TORCH_MODE_STATUS_AVAILABLE_OFF);
224   return NO_ERROR;
225 }
226 
227 /**
228  * Camera3 interface methods
229  */
230 
231 status_t EmulatedFakeCamera3::configureStreams(
232     camera3_stream_configuration *streamList) {
233   Mutex::Autolock l(mLock);
234   ALOGV("%s: %d streams", __FUNCTION__, streamList->num_streams);
235 
236   if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
237     ALOGE("%s: Cannot configure streams in state %d", __FUNCTION__, mStatus);
238     return NO_INIT;
239   }
240 
241   /**
242    * Sanity-check input list.
243    */
244   if (streamList == NULL) {
245     ALOGE("%s: NULL stream configuration", __FUNCTION__);
246     return BAD_VALUE;
247   }
248 
249   if (streamList->streams == NULL) {
250     ALOGE("%s: NULL stream list", __FUNCTION__);
251     return BAD_VALUE;
252   }
253 
254   if (streamList->num_streams < 1) {
255     ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
256           streamList->num_streams);
257     return BAD_VALUE;
258   }
259 
260   camera3_stream_t *inputStream = NULL;
261   for (size_t i = 0; i < streamList->num_streams; i++) {
262     camera3_stream_t *newStream = streamList->streams[i];
263 
264     if (newStream == NULL) {
265       ALOGE("%s: Stream index %zu was NULL", __FUNCTION__, i);
266       return BAD_VALUE;
267     }
268 
269     ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x",
270           __FUNCTION__, newStream, i, newStream->stream_type, newStream->usage,
271           newStream->format);
272 
273     if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
274         newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
275       if (inputStream != NULL) {
276         ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
277         return BAD_VALUE;
278       }
279       inputStream = newStream;
280     }
281 
282     bool validFormat = false;
283     for (size_t f = 0;
284          f < sizeof(kAvailableFormats) / sizeof(kAvailableFormats[0]); f++) {
285       if (newStream->format == kAvailableFormats[f]) {
286         validFormat = true;
287         break;
288       }
289     }
290     if (!validFormat) {
291       ALOGE("%s: Unsupported stream format 0x%x requested", __FUNCTION__,
292             newStream->format);
293       return BAD_VALUE;
294     }
295   }
296   mInputStream = inputStream;
297 
298   /**
299    * Initially mark all existing streams as not alive
300    */
301   for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
302     PrivateStreamInfo *privStream =
303         static_cast<PrivateStreamInfo *>((*s)->priv);
304     privStream->alive = false;
305   }
306 
307   /**
308    * Find new streams and mark still-alive ones
309    */
310   for (size_t i = 0; i < streamList->num_streams; i++) {
311     camera3_stream_t *newStream = streamList->streams[i];
312     if (newStream->priv == NULL) {
313       // New stream, construct info
314       PrivateStreamInfo *privStream = new PrivateStreamInfo();
315       privStream->alive = true;
316 
317       newStream->max_buffers = kMaxBufferCount;
318       newStream->priv = privStream;
319       mStreams.push_back(newStream);
320     } else {
321       // Existing stream, mark as still alive.
322       PrivateStreamInfo *privStream =
323           static_cast<PrivateStreamInfo *>(newStream->priv);
324       privStream->alive = true;
325     }
326     // Always update usage and max buffers
327     newStream->max_buffers = kMaxBufferCount;
328     switch (newStream->stream_type) {
329       case CAMERA3_STREAM_OUTPUT:
330         newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
331         break;
332       case CAMERA3_STREAM_INPUT:
333         newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
334         break;
335       case CAMERA3_STREAM_BIDIRECTIONAL:
336         newStream->usage =
337             GRALLOC_USAGE_HW_CAMERA_READ | GRALLOC_USAGE_HW_CAMERA_WRITE;
338         break;
339     }
340   }
341 
342   /**
343    * Reap the dead streams
344    */
345   for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
346     PrivateStreamInfo *privStream =
347         static_cast<PrivateStreamInfo *>((*s)->priv);
348     if (!privStream->alive) {
349       (*s)->priv = NULL;
350       delete privStream;
351       s = mStreams.erase(s);
352     } else {
353       ++s;
354     }
355   }
356 
357   /**
358    * Can't reuse settings across configure call
359    */
360   mPrevSettings.clear();
361 
362   return OK;
363 }
364 
365 status_t EmulatedFakeCamera3::registerStreamBuffers(
366     const camera3_stream_buffer_set * /*bufferSet*/) {
367   ALOGV("%s: E", __FUNCTION__);
368   Mutex::Autolock l(mLock);
369 
370   // Should not be called in HAL versions >= 3.2
371 
372   ALOGE("%s: Should not be invoked on new HALs!", __FUNCTION__);
373   return NO_INIT;
374 }
375 
376 const camera_metadata_t *EmulatedFakeCamera3::constructDefaultRequestSettings(
377     int type) {
378   ALOGV("%s: E", __FUNCTION__);
379   Mutex::Autolock l(mLock);
380 
381   if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
382     ALOGE("%s: Unknown request settings template: %d", __FUNCTION__, type);
383     return NULL;
384   }
385 
386   if (!hasCapability(BACKWARD_COMPATIBLE) && type != CAMERA3_TEMPLATE_PREVIEW) {
387     ALOGE("%s: Template %d not supported w/o BACKWARD_COMPATIBLE capability",
388           __FUNCTION__, type);
389     return NULL;
390   }
391 
392   /**
393    * The cache is not just an optimization - the returned pointer has to live
394    * at least as long as the camera device instance does.
395    */
396   if (mDefaultTemplates[type] != NULL) {
397     return mDefaultTemplates[type];
398   }
399 
400   CameraMetadata settings;
401 
402   /** android.request */
403 
404   static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
405   settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
406 
407   static const int32_t id = 0;
408   settings.update(ANDROID_REQUEST_ID, &id, 1);
409 
410   static const int32_t frameCount = 0;
411   settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
412 
413   /** android.lens */
414 
415   static const float focalLength = 5.0f;
416   settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
417 
418   if (hasCapability(BACKWARD_COMPATIBLE)) {
419     static const float focusDistance = 0;
420     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
421 
422     static const float aperture = 2.8f;
423     settings.update(ANDROID_LENS_APERTURE, &aperture, 1);
424 
425     static const float filterDensity = 0;
426     settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
427 
428     static const uint8_t opticalStabilizationMode =
429         ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
430     settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
431                     &opticalStabilizationMode, 1);
432 
433     // FOCUS_RANGE set only in frame
434   }
435 
436   /** android.sensor */
437 
438   if (hasCapability(MANUAL_SENSOR)) {
439     static const int64_t exposureTime = 10 * MSEC;
440     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
441 
442     static const int64_t frameDuration = 33333333L;  // 1/30 s
443     settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
444 
445     static const int32_t sensitivity = 100;
446     settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
447   }
448 
449   // TIMESTAMP set only in frame
450 
451   /** android.flash */
452 
453   if (hasCapability(BACKWARD_COMPATIBLE)) {
454     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
455     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
456 
457     static const uint8_t flashPower = 10;
458     settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
459 
460     static const int64_t firingTime = 0;
461     settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
462   }
463 
464   /** Processing block modes */
465   if (hasCapability(MANUAL_POST_PROCESSING)) {
466     uint8_t hotPixelMode = 0;
467     uint8_t demosaicMode = 0;
468     uint8_t noiseMode = 0;
469     uint8_t shadingMode = 0;
470     uint8_t colorMode = 0;
471     uint8_t tonemapMode = 0;
472     uint8_t edgeMode = 0;
473     switch (type) {
474       case CAMERA3_TEMPLATE_STILL_CAPTURE:
475         // fall-through
476       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
477         // fall-through
478       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
479         hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
480         demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
481         noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
482         shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
483         colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
484         tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
485         edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
486         break;
487       case CAMERA3_TEMPLATE_PREVIEW:
488         // fall-through
489       case CAMERA3_TEMPLATE_VIDEO_RECORD:
490         // fall-through
491       default:
492         hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
493         demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
494         noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
495         shadingMode = ANDROID_SHADING_MODE_FAST;
496         colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
497         tonemapMode = ANDROID_TONEMAP_MODE_FAST;
498         edgeMode = ANDROID_EDGE_MODE_FAST;
499         break;
500     }
501     settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
502     settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
503     settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
504     settings.update(ANDROID_SHADING_MODE, &shadingMode, 1);
505     settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
506     settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
507     settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
508   }
509 
510   /** android.colorCorrection */
511 
512   if (hasCapability(MANUAL_POST_PROCESSING)) {
513     static const camera_metadata_rational colorTransform[9] = {
514         {1, 1}, {0, 1}, {0, 1}, {0, 1}, {1, 1}, {0, 1}, {0, 1}, {0, 1}, {1, 1}};
515     settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
516 
517     static const float colorGains[4] = {1.0f, 1.0f, 1.0f, 1.0f};
518     settings.update(ANDROID_COLOR_CORRECTION_GAINS, colorGains, 4);
519   }
520 
521   /** android.tonemap */
522 
523   if (hasCapability(MANUAL_POST_PROCESSING)) {
524     static const float tonemapCurve[4] = {0.f, 0.f, 1.f, 1.f};
525     settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
526     settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
527     settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
528   }
529 
530   /** android.scaler */
531   if (hasCapability(BACKWARD_COMPATIBLE)) {
532     static const int32_t cropRegion[4] = {0, 0, mSensorWidth, mSensorHeight};
533     settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
534   }
535 
536   /** android.jpeg */
537   if (hasCapability(BACKWARD_COMPATIBLE)) {
538     static const uint8_t jpegQuality = 80;
539     settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
540 
541     static const int32_t thumbnailSize[2] = {640, 480};
542     settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
543 
544     static const uint8_t thumbnailQuality = 80;
545     settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
546 
547     static const double gpsCoordinates[2] = {0, 0};
548     settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2);
549 
550     static const uint8_t gpsProcessingMethod[32] = "None";
551     settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod,
552                     32);
553 
554     static const int64_t gpsTimestamp = 0;
555     settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
556 
557     static const int32_t jpegOrientation = 0;
558     settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
559   }
560 
561   /** android.stats */
562 
563   if (hasCapability(BACKWARD_COMPATIBLE)) {
564     static const uint8_t faceDetectMode =
565         ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
566     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
567 
568     static const uint8_t hotPixelMapMode =
569         ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
570     settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
571   }
572 
573   // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
574   // sharpnessMap only in frames
575 
576   /** android.control */
577 
578   uint8_t controlIntent = 0;
579   switch (type) {
580     case CAMERA3_TEMPLATE_PREVIEW:
581       controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
582       break;
583     case CAMERA3_TEMPLATE_STILL_CAPTURE:
584       controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
585       break;
586     case CAMERA3_TEMPLATE_VIDEO_RECORD:
587       controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
588       break;
589     case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
590       controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
591       break;
592     case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
593       controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
594       break;
595     case CAMERA3_TEMPLATE_MANUAL:
596       controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
597       break;
598     default:
599       controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
600       break;
601   }
602   settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
603 
604   const uint8_t controlMode = (type == CAMERA3_TEMPLATE_MANUAL)
605                                   ? ANDROID_CONTROL_MODE_OFF
606                                   : ANDROID_CONTROL_MODE_AUTO;
607   settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
608 
609   int32_t aeTargetFpsRange[2] = {5, 30};
610   if (type == CAMERA3_TEMPLATE_VIDEO_RECORD ||
611       type == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT) {
612     aeTargetFpsRange[0] = 30;
613   }
614   settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
615 
616   if (hasCapability(BACKWARD_COMPATIBLE)) {
617     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
618     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
619 
620     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
621     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
622 
623     const uint8_t aeMode = (type == CAMERA3_TEMPLATE_MANUAL)
624                                ? ANDROID_CONTROL_AE_MODE_OFF
625                                : ANDROID_CONTROL_AE_MODE_ON;
626     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
627 
628     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
629     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
630 
631     static const int32_t controlRegions[5] = {0, 0, 0, 0, 0};
632     settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
633 
634     static const int32_t aeExpCompensation = 0;
635     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
636                     &aeExpCompensation, 1);
637 
638     static const uint8_t aeAntibandingMode =
639         ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
640     settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
641 
642     static const uint8_t aePrecaptureTrigger =
643         ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
644     settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger,
645                     1);
646 
647     const uint8_t awbMode = (type == CAMERA3_TEMPLATE_MANUAL)
648                                 ? ANDROID_CONTROL_AWB_MODE_OFF
649                                 : ANDROID_CONTROL_AWB_MODE_AUTO;
650     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
651 
652     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
653     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
654 
655     uint8_t afMode = 0;
656 
657     if (mFacingBack) {
658       switch (type) {
659         case CAMERA3_TEMPLATE_PREVIEW:
660           afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
661           break;
662         case CAMERA3_TEMPLATE_STILL_CAPTURE:
663           afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
664           break;
665         case CAMERA3_TEMPLATE_VIDEO_RECORD:
666           afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
667           break;
668         case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
669           afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
670           break;
671         case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
672           afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
673           break;
674         case CAMERA3_TEMPLATE_MANUAL:
675           afMode = ANDROID_CONTROL_AF_MODE_OFF;
676           break;
677         default:
678           afMode = ANDROID_CONTROL_AF_MODE_AUTO;
679           break;
680       }
681     } else {
682       afMode = ANDROID_CONTROL_AF_MODE_OFF;
683     }
684     settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
685 
686     settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
687 
688     static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
689     settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);
690 
691     static const uint8_t vstabMode =
692         ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
693     settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
694 
695     static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
696     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
697 
698     static const uint8_t lensShadingMapMode =
699         ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
700     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
701                     &lensShadingMapMode, 1);
702 
703     static const uint8_t aberrationMode =
704         ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
705     settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode,
706                     1);
707 
708     static const int32_t testPatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
709     settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternMode, 1);
710   }
711 
712   mDefaultTemplates[type] = settings.release();
713 
714   return mDefaultTemplates[type];
715 }
716 
717 status_t EmulatedFakeCamera3::processCaptureRequest(
718     camera3_capture_request *request) {
719   Mutex::Autolock l(mLock);
720   status_t res;
721 
722   /** Validation */
723 
724   if (mStatus < STATUS_READY) {
725     ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
726           mStatus);
727     return INVALID_OPERATION;
728   }
729 
730   if (request == NULL) {
731     ALOGE("%s: NULL request!", __FUNCTION__);
732     return BAD_VALUE;
733   }
734 
735   uint32_t frameNumber = request->frame_number;
736 
737   if (request->settings == NULL && mPrevSettings.isEmpty()) {
738     ALOGE(
739         "%s: Request %d: NULL settings for first request after"
740         "configureStreams()",
741         __FUNCTION__, frameNumber);
742     return BAD_VALUE;
743   }
744 
745   if (request->input_buffer != NULL &&
746       request->input_buffer->stream != mInputStream) {
747     ALOGE("%s: Request %d: Input buffer not from input stream!", __FUNCTION__,
748           frameNumber);
749     ALOGV("%s: Bad stream %p, expected: %p", __FUNCTION__,
750           request->input_buffer->stream, mInputStream);
751     ALOGV("%s: Bad stream type %d, expected stream type %d", __FUNCTION__,
752           request->input_buffer->stream->stream_type,
753           mInputStream ? mInputStream->stream_type : -1);
754 
755     return BAD_VALUE;
756   }
757 
758   if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
759     ALOGE("%s: Request %d: No output buffers provided!", __FUNCTION__,
760           frameNumber);
761     return BAD_VALUE;
762   }
763 
764   // Validate all buffers, starting with input buffer if it's given
765 
766   ssize_t idx;
767   const camera3_stream_buffer_t *b;
768   if (request->input_buffer != NULL) {
769     idx = -1;
770     b = request->input_buffer;
771   } else {
772     idx = 0;
773     b = request->output_buffers;
774   }
775   do {
776     PrivateStreamInfo *priv = static_cast<PrivateStreamInfo *>(b->stream->priv);
777     if (priv == NULL) {
778       ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!", __FUNCTION__,
779             frameNumber, idx);
780       return BAD_VALUE;
781     }
782     if (!priv->alive) {
783       ALOGE("%s: Request %d: Buffer %zu: Dead stream!", __FUNCTION__,
784             frameNumber, idx);
785       return BAD_VALUE;
786     }
787     if (b->status != CAMERA3_BUFFER_STATUS_OK) {
788       ALOGE("%s: Request %d: Buffer %zu: Status not OK!", __FUNCTION__,
789             frameNumber, idx);
790       return BAD_VALUE;
791     }
792     if (b->release_fence != -1) {
793       ALOGE("%s: Request %d: Buffer %zu: Has a release fence!", __FUNCTION__,
794             frameNumber, idx);
795       return BAD_VALUE;
796     }
797     if (b->buffer == NULL) {
798       ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!", __FUNCTION__,
799             frameNumber, idx);
800       return BAD_VALUE;
801     }
802     idx++;
803     b = &(request->output_buffers[idx]);
804   } while (idx < (ssize_t)request->num_output_buffers);
805 
806   // TODO: Validate settings parameters
807 
808   /**
809    * Start processing this request
810    */
811 
812   mStatus = STATUS_ACTIVE;
813 
814   CameraMetadata settings;
815 
816   if (request->settings == NULL) {
817     settings.acquire(mPrevSettings);
818   } else {
819     settings = request->settings;
820   }
821 
822   res = process3A(settings);
823   if (res != OK) {
824     return res;
825   }
826 
827   // TODO: Handle reprocessing
828 
829   /**
830    * Get ready for sensor config
831    */
832 
833   nsecs_t exposureTime;
834   nsecs_t frameDuration;
835   uint32_t sensitivity;
836   bool needJpeg = false;
837   camera_metadata_entry_t entry;
838 
839   entry = settings.find(ANDROID_SENSOR_EXPOSURE_TIME);
840   exposureTime =
841       (entry.count > 0) ? entry.data.i64[0] : Sensor::kExposureTimeRange[0];
842   entry = settings.find(ANDROID_SENSOR_FRAME_DURATION);
843   frameDuration =
844       (entry.count > 0) ? entry.data.i64[0] : Sensor::kFrameDurationRange[0];
845   entry = settings.find(ANDROID_SENSOR_SENSITIVITY);
846   sensitivity =
847       (entry.count > 0) ? entry.data.i32[0] : Sensor::kSensitivityRange[0];
848 
849   if (exposureTime > frameDuration) {
850     frameDuration = exposureTime + Sensor::kMinVerticalBlank;
851     settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
852   }
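  // If the requested exposure does not fit within the requested frame
  // duration, the frame is stretched to the exposure time plus the sensor's
  // minimum vertical blank.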
853 
854   Buffers *sensorBuffers = new Buffers();
855   HalBufferVector *buffers = new HalBufferVector();
856 
857   sensorBuffers->setCapacity(request->num_output_buffers);
858   buffers->setCapacity(request->num_output_buffers);
859 
860   // Process all the buffers we got for output, constructing internal buffer
861   // structures for them, and lock them for writing.
862   for (size_t i = 0; i < request->num_output_buffers; i++) {
863     const camera3_stream_buffer &srcBuf = request->output_buffers[i];
864     StreamBuffer destBuf;
865     destBuf.streamId = kGenericStreamId;
866     destBuf.width = srcBuf.stream->width;
867     destBuf.height = srcBuf.stream->height;
868     // For GCE, IMPLEMENTATION_DEFINED is always RGBx_8888
869     destBuf.format =
870         (srcBuf.stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)
871             ? HAL_PIXEL_FORMAT_RGBA_8888
872             : srcBuf.stream->format;
873     destBuf.stride = srcBuf.stream->width;
874     destBuf.dataSpace = srcBuf.stream->data_space;
875     destBuf.buffer = srcBuf.buffer;
876 
877     if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
878       needJpeg = true;
879     }
880 
881     // Wait on fence
882     sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
883     res = bufferAcquireFence->wait(kFenceTimeoutMs);
884     if (res == TIMED_OUT) {
885       ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
886             __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
887     }
888     if (res == OK) {
889       // Lock buffer for writing
890       if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
891         if (destBuf.format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
892           android_ycbcr ycbcr = android_ycbcr();
893           res = GrallocModule::getInstance().lock_ycbcr(
894               *(destBuf.buffer), GRALLOC_USAGE_HW_CAMERA_WRITE, 0, 0,
895               destBuf.width, destBuf.height, &ycbcr);
896           // This is only valid because we know that the emulator's
897           // YCbCr_420_888 is really contiguous NV21 under the hood
898           destBuf.img = static_cast<uint8_t *>(ycbcr.y);
899         } else {
900           ALOGE("Unexpected private format for flexible YUV: 0x%x",
901                 destBuf.format);
902           res = INVALID_OPERATION;
903         }
904       } else {
905         res = GrallocModule::getInstance().lock(
906             *(destBuf.buffer), GRALLOC_USAGE_HW_CAMERA_WRITE, 0, 0,
907             destBuf.width, destBuf.height, (void **)&(destBuf.img));
908       }
909       if (res != OK) {
910         ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer", __FUNCTION__,
911               frameNumber, i);
912       }
913     }
914 
915     if (res != OK) {
916       // Either waiting or locking failed. Unlock locked buffers and bail
917       // out.
918       for (size_t j = 0; j < i; j++) {
919         GrallocModule::getInstance().unlock(
920             *(request->output_buffers[i].buffer));
921       }
922       delete sensorBuffers;
923       delete buffers;
924       return NO_INIT;
925     }
926 
927     sensorBuffers->push_back(destBuf);
928     buffers->push_back(srcBuf);
929   }
930 
931   /**
932    * Wait for JPEG compressor to not be busy, if needed
933    */
934   if (needJpeg) {
935     bool ready = mJpegCompressor->waitForDone(kJpegTimeoutNs);
936     if (!ready) {
937       ALOGE("%s: Timeout waiting for JPEG compression to complete!",
938             __FUNCTION__);
939       return NO_INIT;
940     }
941     res = mJpegCompressor->reserve();
942     if (res != OK) {
943       ALOGE("%s: Error managing JPEG compressor resources, can't reserve it!",
944             __FUNCTION__);
945       return NO_INIT;
946     }
947   }
948 
949   /**
950    * Wait until the in-flight queue has room
951    */
952   res = mReadoutThread->waitForReadout();
953   if (res != OK) {
954     ALOGE("%s: Timeout waiting for previous requests to complete!",
955           __FUNCTION__);
956     return NO_INIT;
957   }
958 
959   /**
960    * Wait until sensor's ready. This waits for lengthy amounts of time with
961    * mLock held, but the interface spec is that no other calls may be made to
962    * the HAL by the framework while process_capture_request is happening.
963    */
964   int syncTimeoutCount = 0;
965   while (!mSensor->waitForVSync(kSyncWaitTimeout)) {
966     if (mStatus == STATUS_ERROR) {
967       return NO_INIT;
968     }
969     if (syncTimeoutCount == kMaxSyncTimeoutCount) {
970       ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
971             __FUNCTION__, frameNumber,
972             kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
973       return NO_INIT;
974     }
975     syncTimeoutCount++;
976   }
977 
978   /**
979    * Configure sensor and queue up the request to the readout thread
980    */
981   mSensor->setExposureTime(exposureTime);
982   mSensor->setFrameDuration(frameDuration);
983   mSensor->setSensitivity(sensitivity);
984   mSensor->setDestinationBuffers(sensorBuffers);
985   mSensor->setFrameNumber(request->frame_number);
986 
987   ReadoutThread::Request r;
988   r.frameNumber = request->frame_number;
989   r.settings = settings;
990   r.sensorBuffers = sensorBuffers;
991   r.buffers = buffers;
992 
993   mReadoutThread->queueCaptureRequest(r);
994   ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);
995 
996   // Cache the settings for next time
997   mPrevSettings.acquire(settings);
998 
999   return OK;
1000 }
1001 
1002 status_t EmulatedFakeCamera3::flush() {
1003   ALOGW("%s: Not implemented; ignored", __FUNCTION__);
1004   return OK;
1005 }
1006 
1007 /** Debug methods */
1008 
1009 void EmulatedFakeCamera3::dump(int /*fd*/) {}
1010 
1011 /**
1012  * Private methods
1013  */
1014 
1015 status_t EmulatedFakeCamera3::getCameraCapabilities() {
1016   const char *key =
1017       mFacingBack ? "qemu.sf.back_camera_caps" : "qemu.sf.front_camera_caps";
1018 
1019   /* Defined by 'qemu.sf.*_camera_caps' boot property: if the
1020    * property doesn't exist, FULL_LEVEL plus RAW is assumed. */
1021   char prop[PROPERTY_VALUE_MAX];
1022   if (property_get(key, prop, NULL) > 0) {
1023     char *saveptr = nullptr;
1024     char *cap = strtok_r(prop, " ,", &saveptr);
1025     while (cap != NULL) {
1026       for (int i = 0; i < NUM_CAPABILITIES; i++) {
1027         if (!strcasecmp(cap, sAvailableCapabilitiesStrings[i])) {
1028           mCapabilities.add(static_cast<AvailableCapabilities>(i));
1029           break;
1030         }
1031       }
1032       cap = strtok_r(NULL, " ,", &saveptr);
1033     }
1034     if (mCapabilities.size() == 0) {
1035       ALOGE("%s had no valid capabilities: %s", key, prop);
1036     }
1037   }
1038   // Default to FULL_LEVEL plus RAW if nothing is defined
1039   if (mCapabilities.size() == 0) {
1040     mCapabilities.add(FULL_LEVEL);
1041     mCapabilities.add(RAW);
1042   }
1043 
1044   // Add level-based caps
1045   if (hasCapability(FULL_LEVEL)) {
1046     mCapabilities.add(BURST_CAPTURE);
1047     mCapabilities.add(READ_SENSOR_SETTINGS);
1048     mCapabilities.add(MANUAL_SENSOR);
1049     mCapabilities.add(MANUAL_POST_PROCESSING);
1050   }
1051 
1052   // Backwards-compatible is required for most other caps
1053   // Not required for DEPTH_OUTPUT, though.
1054   if (hasCapability(BURST_CAPTURE) || hasCapability(READ_SENSOR_SETTINGS) ||
1055       hasCapability(RAW) || hasCapability(MANUAL_SENSOR) ||
1056       hasCapability(MANUAL_POST_PROCESSING) ||
1057       hasCapability(PRIVATE_REPROCESSING) || hasCapability(YUV_REPROCESSING) ||
1058       hasCapability(CONSTRAINED_HIGH_SPEED_VIDEO)) {
1059     mCapabilities.add(BACKWARD_COMPATIBLE);
1060   }
1061 
1062   ALOGI("Camera %d capabilities:", mCameraID);
1063   for (size_t i = 0; i < mCapabilities.size(); i++) {
1064     ALOGI("  %s", sAvailableCapabilitiesStrings[mCapabilities[i]]);
1065   }
1066 
1067   return OK;
1068 }
1069 
1070 bool EmulatedFakeCamera3::hasCapability(AvailableCapabilities cap) {
1071   ssize_t idx = mCapabilities.indexOf(cap);
1072   return idx >= 0;
1073 }
1074 
1075 status_t EmulatedFakeCamera3::constructStaticInfo(
1076     const cvd::CameraDefinition &params) {
1077   CameraMetadata info;
1078   Vector<int32_t> availableCharacteristicsKeys;
1079   status_t res;
1080 
1081   int32_t width = 0, height = 0;
1082 
1083   /* TODO(ender): this currently supports only maximum resolution. */
1084   for (size_t index = 0; index < params.resolutions.size(); ++index) {
1085     if (width <= params.resolutions[index].width &&
1086         height <= params.resolutions[index].height) {
1087       width = params.resolutions[index].width;
1088       height = params.resolutions[index].height;
1089     }
1090   }
1091 
1092   if (width < 640 || height < 480) {
1093     width = 640;
1094     height = 480;
1095   }
1096 
1097   mSensorWidth = width;
1098   mSensorHeight = height;
1099 
1100 #define ADD_STATIC_ENTRY(name, varptr, count) \
1101   availableCharacteristicsKeys.add(name);     \
1102   res = info.update(name, varptr, count);     \
1103   if (res != OK) return res
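  // ADD_STATIC_ENTRY both records the tag in availableCharacteristicsKeys and
  // writes the metadata entry, returning from constructStaticInfo() on the
  // first failure.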
1104 
1105   // android.sensor
1106 
1107   if (hasCapability(MANUAL_SENSOR)) {
1108     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1109                      Sensor::kExposureTimeRange, 2);
1110 
1111     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1112                      &Sensor::kFrameDurationRange[1], 1);
1113 
1114     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
1115                      Sensor::kSensitivityRange,
1116                      sizeof(Sensor::kSensitivityRange) / sizeof(int32_t));
1117 
1118     ADD_STATIC_ENTRY(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
1119                      &Sensor::kSensitivityRange[1], 1);
1120   }
1121 
1122   static const float sensorPhysicalSize[2] = {3.20f, 2.40f};  // mm
1123   ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, sensorPhysicalSize, 2);
1124 
1125   const int32_t pixelArray[] = {mSensorWidth, mSensorHeight};
1126   ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, pixelArray, 2);
1127   const int32_t activeArray[] = {0, 0, mSensorWidth, mSensorHeight};
1128   ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArray, 4);
1129 
1130   static const int32_t orientation = 90;  // Aligned with 'long edge'
1131   ADD_STATIC_ENTRY(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1132 
1133   static const uint8_t timestampSource =
1134       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
1135   ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);
1136 
1137   if (hasCapability(RAW)) {
1138     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1139                      &Sensor::kColorFilterArrangement, 1);
1140 
1141     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1142                      (int32_t *)&Sensor::kMaxRawValue, 1);
1143 
1144     static const int32_t blackLevelPattern[4] = {
1145         (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel,
1146         (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel};
1147     ADD_STATIC_ENTRY(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, blackLevelPattern,
1148                      sizeof(blackLevelPattern) / sizeof(int32_t));
1149   }
1150 
1151   if (hasCapability(BACKWARD_COMPATIBLE)) {
1152     static const int32_t availableTestPatternModes[] = {
1153         ANDROID_SENSOR_TEST_PATTERN_MODE_OFF};
1154     ADD_STATIC_ENTRY(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
1155                      availableTestPatternModes,
1156                      sizeof(availableTestPatternModes) / sizeof(int32_t));
1157   }
1158 
1159   // android.lens
1160 
1161   static const float focalLength = 3.30f;  // mm
1162   ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, &focalLength, 1);
1163 
1164   if (hasCapability(BACKWARD_COMPATIBLE)) {
1165     // 5 cm min focus distance for back camera, infinity (fixed focus) for front
1166     const float minFocusDistance = mFacingBack ? 1.0 / 0.05 : 0.0;
1167     ADD_STATIC_ENTRY(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1168                      &minFocusDistance, 1);
1169 
1170     // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
1171     const float hyperFocalDistance = mFacingBack ? 1.0 / 5.0 : 0.0;
1172     ADD_STATIC_ENTRY(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, &hyperFocalDistance,
1173                      1);
1174 
1175     static const float aperture = 2.8f;
1176     ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_APERTURES, &aperture, 1);
1177     static const float filterDensity = 0;
1178     ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1179                      &filterDensity, 1);
1180     static const uint8_t availableOpticalStabilization =
1181         ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
1182     ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1183                      &availableOpticalStabilization, 1);
1184 
1185     static const int32_t lensShadingMapSize[] = {1, 1};
1186     ADD_STATIC_ENTRY(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
1187                      sizeof(lensShadingMapSize) / sizeof(int32_t));
1188 
1189     static const uint8_t lensFocusCalibration =
1190         ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE;
1191     ADD_STATIC_ENTRY(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
1192                      &lensFocusCalibration, 1);
1193   }
1194 
1195   if (hasCapability(DEPTH_OUTPUT)) {
1196     // These could be included for non-DEPTH capabilities as well, but keeping
1197     // them DEPTH-only varies the test coverage
1198 
1199     // 90 degree rotation to align with long edge of a phone device that's by
1200     // default portrait
1201     static const float qO[] = {0.707107f, 0.f, 0.f, 0.707107f};
1202 
1203     // Either a 180-degree rotation for back-facing, or no rotation for
1204     // front-facing
1205     const float qF[] = {0, (mFacingBack ? 1.f : 0.f), 0,
1206                         (mFacingBack ? 0.f : 1.f)};
1207 
1208     // Quaternion product: orientation change, then facing
1209     const float lensPoseRotation[] = {
1210         qO[0] * qF[0] - qO[1] * qF[1] - qO[2] * qF[2] - qO[3] * qF[3],
1211         qO[0] * qF[1] + qO[1] * qF[0] + qO[2] * qF[3] - qO[3] * qF[2],
1212         qO[0] * qF[2] + qO[2] * qF[0] + qO[1] * qF[3] - qO[3] * qF[1],
1213         qO[0] * qF[3] + qO[3] * qF[0] + qO[1] * qF[2] - qO[2] * qF[1]};
1214 
1215     ADD_STATIC_ENTRY(ANDROID_LENS_POSE_ROTATION, lensPoseRotation,
1216                      sizeof(lensPoseRotation) / sizeof(float));
1217 
1218     // Only one camera facing each way, so 0 translation needed to the center of
1219     // the 'main' camera
1220     static const float lensPoseTranslation[] = {0.f, 0.f, 0.f};
1221 
1222     ADD_STATIC_ENTRY(ANDROID_LENS_POSE_TRANSLATION, lensPoseTranslation,
1223                      sizeof(lensPoseTranslation) / sizeof(float));
1224 
1225     // Intrinsics are 'ideal' (f_x, f_y, c_x, c_y, s) match focal length and
1226     // active array size
1227     float f_x = focalLength * mSensorWidth / sensorPhysicalSize[0];
1228     float f_y = focalLength * mSensorHeight / sensorPhysicalSize[1];
1229     float c_x = mSensorWidth / 2.f;
1230     float c_y = mSensorHeight / 2.f;
1231     float s = 0.f;
1232     const float lensIntrinsics[] = {f_x, f_y, c_x, c_y, s};
1233 
1234     ADD_STATIC_ENTRY(ANDROID_LENS_INTRINSIC_CALIBRATION, lensIntrinsics,
1235                      sizeof(lensIntrinsics) / sizeof(float));
1236 
1237     // No radial or tangential distortion
1238 
1239     float lensRadialDistortion[] = {1.0f, 0.f, 0.f, 0.f, 0.f, 0.f};
1240 
1241     ADD_STATIC_ENTRY(ANDROID_LENS_RADIAL_DISTORTION, lensRadialDistortion,
1242                      sizeof(lensRadialDistortion) / sizeof(float));
1243   }
1244 
1245   const uint8_t lensFacing =
1246       mFacingBack ? ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1247   ADD_STATIC_ENTRY(ANDROID_LENS_FACING, &lensFacing, 1);
1248 
1249   // android.flash
1250 
1251   const uint8_t flashAvailable = mFacingBack;
1252   ADD_STATIC_ENTRY(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
1253 
1254   // android.tonemap
1255 
1256   if (hasCapability(MANUAL_POST_PROCESSING)) {
1257     static const int32_t tonemapCurvePoints = 128;
1258     ADD_STATIC_ENTRY(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
1259 
1260     static const uint8_t availableToneMapModes[] = {
1261         ANDROID_TONEMAP_MODE_CONTRAST_CURVE, ANDROID_TONEMAP_MODE_FAST,
1262         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
1263     ADD_STATIC_ENTRY(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
1264                      availableToneMapModes, sizeof(availableToneMapModes));
1265   }
1266 
1267   // android.scaler
1268 
1269   const std::vector<int32_t> availableStreamConfigurationsBasic = {
1270       HAL_PIXEL_FORMAT_BLOB,
1271       width,
1272       height,
1273       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1274       HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1275       320,
1276       240,
1277       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1278       HAL_PIXEL_FORMAT_YCbCr_420_888,
1279       320,
1280       240,
1281       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1282       HAL_PIXEL_FORMAT_BLOB,
1283       320,
1284       240,
1285       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1286   };
1287 
1288   // Always need to include 640x480 in basic formats
1289   const std::vector<int32_t> availableStreamConfigurationsBasic640 = {
1290       HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1291       640,
1292       480,
1293       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1294       HAL_PIXEL_FORMAT_YCbCr_420_888,
1295       640,
1296       480,
1297       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1298       HAL_PIXEL_FORMAT_BLOB,
1299       640,
1300       480,
1301       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
1302 
1303   const std::vector<int32_t> availableStreamConfigurationsRaw = {
1304       HAL_PIXEL_FORMAT_RAW16,
1305       width,
1306       height,
1307       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1308   };
1309 
1310   const std::vector<int32_t> availableStreamConfigurationsBurst = {
1311       HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1312       width,
1313       height,
1314       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1315       HAL_PIXEL_FORMAT_YCbCr_420_888,
1316       width,
1317       height,
1318       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1319       HAL_PIXEL_FORMAT_RGBA_8888,
1320       width,
1321       height,
1322       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1323   };
1324 
1325   std::vector<int32_t> availableStreamConfigurations;
1326 
1327   if (hasCapability(BACKWARD_COMPATIBLE)) {
1328     availableStreamConfigurations.insert(
1329         availableStreamConfigurations.end(),
1330         availableStreamConfigurationsBasic.begin(),
1331         availableStreamConfigurationsBasic.end());
1332     if (width > 640) {
1333       availableStreamConfigurations.insert(
1334           availableStreamConfigurations.end(),
1335           availableStreamConfigurationsBasic640.begin(),
1336           availableStreamConfigurationsBasic640.end());
1337     }
1338   }
1339   if (hasCapability(RAW)) {
1340     availableStreamConfigurations.insert(
1341         availableStreamConfigurations.end(),
1342         availableStreamConfigurationsRaw.begin(),
1343         availableStreamConfigurationsRaw.end());
1344   }
1345   if (hasCapability(BURST_CAPTURE)) {
1346     availableStreamConfigurations.insert(
1347         availableStreamConfigurations.end(),
1348         availableStreamConfigurationsBurst.begin(),
1349         availableStreamConfigurationsBurst.end());
1350   }
1351 
1352   if (availableStreamConfigurations.size() > 0) {
1353     ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1354                      &availableStreamConfigurations[0],
1355                      availableStreamConfigurations.size());
1356   }
1357 
1358   const std::vector<int64_t> availableMinFrameDurationsBasic = {
1359       HAL_PIXEL_FORMAT_BLOB,
1360       width,
1361       height,
1362       Sensor::kFrameDurationRange[0],
1363       HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1364       320,
1365       240,
1366       Sensor::kFrameDurationRange[0],
1367       HAL_PIXEL_FORMAT_YCbCr_420_888,
1368       320,
1369       240,
1370       Sensor::kFrameDurationRange[0],
1371       HAL_PIXEL_FORMAT_BLOB,
1372       320,
1373       240,
1374       Sensor::kFrameDurationRange[0],
1375   };
1376 
1377   // Always need to include 640x480 in basic formats
1378   const std::vector<int64_t> availableMinFrameDurationsBasic640 = {
1379       HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1380       640,
1381       480,
1382       Sensor::kFrameDurationRange[0],
1383       HAL_PIXEL_FORMAT_YCbCr_420_888,
1384       640,
1385       480,
1386       Sensor::kFrameDurationRange[0],
1387       HAL_PIXEL_FORMAT_BLOB,
1388       640,
1389       480,
1390       Sensor::kFrameDurationRange[0]};
1391 
1392   const std::vector<int64_t> availableMinFrameDurationsRaw = {
1393       HAL_PIXEL_FORMAT_RAW16,
1394       width,
1395       height,
1396       Sensor::kFrameDurationRange[0],
1397   };
1398 
1399   const std::vector<int64_t> availableMinFrameDurationsBurst = {
1400       HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1401       width,
1402       height,
1403       Sensor::kFrameDurationRange[0],
1404       HAL_PIXEL_FORMAT_YCbCr_420_888,
1405       width,
1406       height,
1407       Sensor::kFrameDurationRange[0],
1408       HAL_PIXEL_FORMAT_RGBA_8888,
1409       width,
1410       height,
1411       Sensor::kFrameDurationRange[0],
1412   };
1413 
1414   std::vector<int64_t> availableMinFrameDurations;
1415 
1416   if (hasCapability(BACKWARD_COMPATIBLE)) {
1417     availableMinFrameDurations.insert(availableMinFrameDurations.end(),
1418                                       availableMinFrameDurationsBasic.begin(),
1419                                       availableMinFrameDurationsBasic.end());
1420     if (width > 640) {
1421       availableMinFrameDurations.insert(
1422           availableMinFrameDurations.end(),
1423           availableMinFrameDurationsBasic640.begin(),
1424           availableMinFrameDurationsBasic640.end());
1425     }
1426   }
1427   if (hasCapability(RAW)) {
1428     availableMinFrameDurations.insert(availableMinFrameDurations.end(),
1429                                       availableMinFrameDurationsRaw.begin(),
1430                                       availableMinFrameDurationsRaw.end());
1431   }
1432   if (hasCapability(BURST_CAPTURE)) {
1433     availableMinFrameDurations.insert(availableMinFrameDurations.end(),
1434                                       availableMinFrameDurationsBurst.begin(),
1435                                       availableMinFrameDurationsBurst.end());
1436   }
1437 
1438   if (availableMinFrameDurations.size() > 0) {
1439     ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
1440                      &availableMinFrameDurations[0],
1441                      availableMinFrameDurations.size());
1442   }
1443 
1444   const std::vector<int64_t> availableStallDurationsBasic = {
1445       HAL_PIXEL_FORMAT_BLOB,
1446       width,
1447       height,
1448       Sensor::kFrameDurationRange[0],
1449       HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1450       320,
1451       240,
1452       0,
1453       HAL_PIXEL_FORMAT_YCbCr_420_888,
1454       320,
1455       240,
1456       0,
1457       HAL_PIXEL_FORMAT_RGBA_8888,
1458       320,
1459       240,
1460       0,
1461   };
1462 
1463   // Always need to include 640x480 in basic formats
1464   const std::vector<int64_t> availableStallDurationsBasic640 = {
1465       HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1466       640,
1467       480,
1468       0,
1469       HAL_PIXEL_FORMAT_YCbCr_420_888,
1470       640,
1471       480,
1472       0,
1473       HAL_PIXEL_FORMAT_BLOB,
1474       640,
1475       480,
1476       Sensor::kFrameDurationRange[0]};
1477 
1478   const std::vector<int64_t> availableStallDurationsRaw = {
1479       HAL_PIXEL_FORMAT_RAW16, width, height, Sensor::kFrameDurationRange[0]};
1480   const std::vector<int64_t> availableStallDurationsBurst = {
1481       HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1482       width,
1483       height,
1484       0,
1485       HAL_PIXEL_FORMAT_YCbCr_420_888,
1486       width,
1487       height,
1488       0,
1489       HAL_PIXEL_FORMAT_RGBA_8888,
1490       width,
1491       height,
1492       0};
1493 
1494   std::vector<int64_t> availableStallDurations;
1495 
1496   if (hasCapability(BACKWARD_COMPATIBLE)) {
1497     availableStallDurations.insert(availableStallDurations.end(),
1498                                    availableStallDurationsBasic.begin(),
1499                                    availableStallDurationsBasic.end());
1500     if (width > 640) {
1501       availableStallDurations.insert(availableStallDurations.end(),
1502                                      availableStallDurationsBasic640.begin(),
1503                                      availableStallDurationsBasic640.end());
1504     }
1505   }
1506   if (hasCapability(RAW)) {
1507     availableStallDurations.insert(availableStallDurations.end(),
1508                                    availableStallDurationsRaw.begin(),
1509                                    availableStallDurationsRaw.end());
1510   }
1511   if (hasCapability(BURST_CAPTURE)) {
1512     availableStallDurations.insert(availableStallDurations.end(),
1513                                    availableStallDurationsBurst.begin(),
1514                                    availableStallDurationsBurst.end());
1515   }
1516 
1517   if (availableStallDurations.size() > 0) {
1518     ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
1519                      &availableStallDurations[0],
1520                      availableStallDurations.size());
1521   }
1522 
1523   if (hasCapability(BACKWARD_COMPATIBLE)) {
1524     static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
1525     ADD_STATIC_ENTRY(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
1526 
1527     static const float maxZoom = 10;
1528     ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, &maxZoom, 1);
1529   }
1530 
1531   // android.jpeg
1532 
1533   if (hasCapability(BACKWARD_COMPATIBLE)) {
1534     static const int32_t jpegThumbnailSizes[] = {0, 0, 160, 120, 320, 240};
1535     ADD_STATIC_ENTRY(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegThumbnailSizes,
1536                      sizeof(jpegThumbnailSizes) / sizeof(int32_t));
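    // The thumbnail list is (width, height) pairs; per the metadata
    // definition, the (0, 0) entry indicates that thumbnail generation can be
    // skipped entirely.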
1537 
1538     static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
1539     ADD_STATIC_ENTRY(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
1540   }
1541 
1542   // android.stats
1543 
1544   if (hasCapability(BACKWARD_COMPATIBLE)) {
1545     static const uint8_t availableFaceDetectModes[] = {
1546         ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
1547         ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
1548         ANDROID_STATISTICS_FACE_DETECT_MODE_FULL};
1549     ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1550                      availableFaceDetectModes,
1551                      sizeof(availableFaceDetectModes));
1552 
1553     static const int32_t maxFaceCount = 8;
1554     ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, &maxFaceCount, 1);
1555 
1556     static const uint8_t availableShadingMapModes[] = {
1557         ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF};
1558     ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
1559                      availableShadingMapModes,
1560                      sizeof(availableShadingMapModes));
1561   }
1562 
1563   // android.sync
1564 
1565   static const int32_t maxLatency =
1566       hasCapability(FULL_LEVEL) ? ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL
1567                                 : 3;
1568   ADD_STATIC_ENTRY(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);
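  // PER_FRAME_CONTROL advertises that settings submitted with request N are
  // applied to frame N itself; the LIMITED fallback value of 3 means new
  // settings may not take effect for up to 3 frames.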
1569 
1570   // android.control
1571 
1572   if (hasCapability(BACKWARD_COMPATIBLE)) {
1573     static const uint8_t availableControlModes[] = {
1574         ANDROID_CONTROL_MODE_OFF, ANDROID_CONTROL_MODE_AUTO,
1575         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
1576     ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES, availableControlModes,
1577                      sizeof(availableControlModes));
1578   } else {
1579     static const uint8_t availableControlModes[] = {ANDROID_CONTROL_MODE_AUTO};
1580     ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES, availableControlModes,
1581                      sizeof(availableControlModes));
1582   }
1583 
1584   static const uint8_t availableSceneModes[] = {
1585     static_cast<uint8_t>(hasCapability(BACKWARD_COMPATIBLE)
1586                          ? ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY
1587                          : ANDROID_CONTROL_SCENE_MODE_DISABLED)};
1588   ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, availableSceneModes,
1589                    sizeof(availableSceneModes));
1590 
1591   if (hasCapability(BACKWARD_COMPATIBLE)) {
1592     static const uint8_t availableEffects[] = {ANDROID_CONTROL_EFFECT_MODE_OFF};
1593     ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_EFFECTS, availableEffects,
1594                      sizeof(availableEffects));
1595   }
1596 
1597   if (hasCapability(BACKWARD_COMPATIBLE)) {
1598     static const int32_t max3aRegions[] = {/*AE*/ 1, /*AWB*/ 0, /*AF*/ 1};
1599     ADD_STATIC_ENTRY(ANDROID_CONTROL_MAX_REGIONS, max3aRegions,
1600                      sizeof(max3aRegions) / sizeof(max3aRegions[0]));
1601 
1602     static const uint8_t availableAeModes[] = {ANDROID_CONTROL_AE_MODE_OFF,
1603                                                ANDROID_CONTROL_AE_MODE_ON};
1604     ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_MODES, availableAeModes,
1605                      sizeof(availableAeModes));
1606 
1607     static const camera_metadata_rational exposureCompensationStep = {1, 3};
1608     ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1609                      &exposureCompensationStep, 1);
1610 
1611     int32_t exposureCompensationRange[] = {-9, 9};
1612     ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1613                      exposureCompensationRange,
1614                      sizeof(exposureCompensationRange) / sizeof(int32_t));
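    // With a 1/3 EV step and a range of [-9, 9] steps, this exposes roughly
    // +/-3 EV of exposure compensation.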
1615   }
1616 
1617   static const int32_t availableTargetFpsRanges[] = {5,  30, 15, 30,
1618                                                      15, 15, 30, 30};
1619   ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1620                    availableTargetFpsRanges,
1621                    sizeof(availableTargetFpsRanges) / sizeof(int32_t));
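  // Encoded as (min, max) fps pairs: [5, 30], [15, 30], [15, 15] and
  // [30, 30].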
1622 
1623   if (hasCapability(BACKWARD_COMPATIBLE)) {
1624     static const uint8_t availableAntibandingModes[] = {
1625         ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
1626         ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO};
1627     ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1628                      availableAntibandingModes,
1629                      sizeof(availableAntibandingModes));
1630   }
1631 
1632   static const uint8_t aeLockAvailable =
1633       hasCapability(BACKWARD_COMPATIBLE)
1634           ? ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE
1635           : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
1636 
1637   ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_LOCK_AVAILABLE, &aeLockAvailable, 1);
1638 
1639   if (hasCapability(BACKWARD_COMPATIBLE)) {
1640     static const uint8_t availableAwbModes[] = {
1641         ANDROID_CONTROL_AWB_MODE_OFF,
1642         ANDROID_CONTROL_AWB_MODE_AUTO,
1643         ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
1644         ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
1645         ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
1646         ANDROID_CONTROL_AWB_MODE_SHADE};
1647     ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_AVAILABLE_MODES, availableAwbModes,
1648                      sizeof(availableAwbModes));
1649   }
1650 
1651   static const uint8_t awbLockAvailable =
1652       hasCapability(BACKWARD_COMPATIBLE)
1653           ? ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE
1654           : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
1655 
1656   ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, &awbLockAvailable, 1);
1657 
1658   static const uint8_t availableAfModesBack[] = {
1659       ANDROID_CONTROL_AF_MODE_OFF, ANDROID_CONTROL_AF_MODE_AUTO,
1660       ANDROID_CONTROL_AF_MODE_MACRO, ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
1661       ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE};
1662 
1663   static const uint8_t availableAfModesFront[] = {ANDROID_CONTROL_AF_MODE_OFF};
1664 
1665   if (mFacingBack && hasCapability(BACKWARD_COMPATIBLE)) {
1666     ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES, availableAfModesBack,
1667                      sizeof(availableAfModesBack));
1668   } else {
1669     ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES, availableAfModesFront,
1670                      sizeof(availableAfModesFront));
1671   }
1672 
1673   static const uint8_t availableVstabModes[] = {
1674       ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
1675   ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1676                    availableVstabModes, sizeof(availableVstabModes));
1677 
1678   // android.colorCorrection
1679 
1680   if (hasCapability(BACKWARD_COMPATIBLE)) {
1681     static const uint8_t availableAberrationModes[] = {
1682         ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
1683         ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
1684         ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
1685     ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
1686                      availableAberrationModes,
1687                      sizeof(availableAberrationModes));
1688   } else {
1689     static const uint8_t availableAberrationModes[] = {
1690         ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
1691     };
1692     ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
1693                      availableAberrationModes,
1694                      sizeof(availableAberrationModes));
1695   }
1696   // android.edge
1697 
1698   if (hasCapability(BACKWARD_COMPATIBLE)) {
1699     static const uint8_t availableEdgeModes[] = {
1700         ANDROID_EDGE_MODE_OFF, ANDROID_EDGE_MODE_FAST,
1701         ANDROID_EDGE_MODE_HIGH_QUALITY};
1702     ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES, availableEdgeModes,
1703                      sizeof(availableEdgeModes));
1704   } else {
1705     static const uint8_t availableEdgeModes[] = {ANDROID_EDGE_MODE_OFF};
1706     ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES, availableEdgeModes,
1707                      sizeof(availableEdgeModes));
1708   }
1709 
1710   // android.info
1711 
1712   static const uint8_t supportedHardwareLevel =
1713       hasCapability(FULL_LEVEL) ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL
1714                                 : ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
1715   ADD_STATIC_ENTRY(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
1716                    &supportedHardwareLevel,
1717                    /*count*/ 1);
1718 
1719   // android.noiseReduction
1720 
1721   if (hasCapability(BACKWARD_COMPATIBLE)) {
1722     static const uint8_t availableNoiseReductionModes[] = {
1723         ANDROID_NOISE_REDUCTION_MODE_OFF, ANDROID_NOISE_REDUCTION_MODE_FAST,
1724         ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY};
1725     ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
1726                      availableNoiseReductionModes,
1727                      sizeof(availableNoiseReductionModes));
1728   } else {
1729     static const uint8_t availableNoiseReductionModes[] = {
1730         ANDROID_NOISE_REDUCTION_MODE_OFF,
1731     };
1732     ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
1733                      availableNoiseReductionModes,
1734                      sizeof(availableNoiseReductionModes));
1735   }
1736 
1737   // android.depth
1738 
1739   if (hasCapability(DEPTH_OUTPUT)) {
1740     static const int32_t maxDepthSamples = 100;
1741     ADD_STATIC_ENTRY(ANDROID_DEPTH_MAX_DEPTH_SAMPLES, &maxDepthSamples, 1);
1742 
1743     static const int32_t availableDepthStreamConfigurations[] = {
1744         HAL_PIXEL_FORMAT_Y16,
1745         160,
1746         120,
1747         ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
1748         HAL_PIXEL_FORMAT_BLOB,
1749         maxDepthSamples,
1750         1,
1751         ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT};
1752     ADD_STATIC_ENTRY(
1753         ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
1754         availableDepthStreamConfigurations,
1755         sizeof(availableDepthStreamConfigurations) / sizeof(int32_t));
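    // The BLOB entry advertises a maxDepthSamples x 1 "resolution" because
    // depth point clouds are returned as a BLOB whose logical size is the
    // number of depth samples rather than a pixel grid.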
1756 
1757     static const int64_t availableDepthMinFrameDurations[] = {
1758         HAL_PIXEL_FORMAT_Y16,
1759         160,
1760         120,
1761         Sensor::kFrameDurationRange[0],
1762         HAL_PIXEL_FORMAT_BLOB,
1763         maxDepthSamples,
1764         1,
1765         Sensor::kFrameDurationRange[0]};
1766     ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
1767                      availableDepthMinFrameDurations,
1768                      sizeof(availableDepthMinFrameDurations) / sizeof(int64_t));
1769 
1770     static const int64_t availableDepthStallDurations[] = {
1771         HAL_PIXEL_FORMAT_Y16,
1772         160,
1773         120,
1774         Sensor::kFrameDurationRange[0],
1775         HAL_PIXEL_FORMAT_BLOB,
1776         maxDepthSamples,
1777         1,
1778         Sensor::kFrameDurationRange[0]};
1779     ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
1780                      availableDepthStallDurations,
1781                      sizeof(availableDepthStallDurations) / sizeof(int64_t));
1782 
1783     uint8_t depthIsExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
1784     ADD_STATIC_ENTRY(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthIsExclusive, 1);
1785   }
1786 
1787   // android.shading
1788 
1789   if (hasCapability(BACKWARD_COMPATIBLE)) {
1790     static const uint8_t availableShadingModes[] = {
1791         ANDROID_SHADING_MODE_OFF, ANDROID_SHADING_MODE_FAST,
1792         ANDROID_SHADING_MODE_HIGH_QUALITY};
1793     ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
1794                      sizeof(availableShadingModes));
1795   } else {
1796     static const uint8_t availableShadingModes[] = {ANDROID_SHADING_MODE_OFF};
1797     ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
1798                      sizeof(availableShadingModes));
1799   }
1800 
1801   // android.request
1802 
1803   static const int32_t maxNumOutputStreams[] = {
1804       kMaxRawStreamCount, kMaxProcessedStreamCount, kMaxJpegStreamCount};
1805   ADD_STATIC_ENTRY(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, maxNumOutputStreams,
1806                    3);
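  // Ordered as (raw, processed non-stalling, processed stalling/JPEG) stream
  // counts, matching the layout of ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS.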
1807 
1808   static const uint8_t maxPipelineDepth = kMaxBufferCount;
1809   ADD_STATIC_ENTRY(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &maxPipelineDepth, 1);
1810 
1811   static const int32_t partialResultCount = 1;
1812   ADD_STATIC_ENTRY(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &partialResultCount,
1813                    /*count*/ 1);
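  // A partial result count of 1 means each capture produces only a single,
  // final metadata result; no partial results are sent.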
1814 
1815   SortedVector<uint8_t> caps;
1816   for (size_t i = 0; i < mCapabilities.size(); i++) {
1817     switch (mCapabilities[i]) {
1818       case BACKWARD_COMPATIBLE:
1819         caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
1820         break;
1821       case MANUAL_SENSOR:
1822         caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
1823         break;
1824       case MANUAL_POST_PROCESSING:
1825         caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
1826         break;
1827       case RAW:
1828         caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
1829         break;
1830       case PRIVATE_REPROCESSING:
1831         caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
1832         break;
1833       case READ_SENSOR_SETTINGS:
1834         caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
1835         break;
1836       case BURST_CAPTURE:
1837         caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
1838         break;
1839       case YUV_REPROCESSING:
1840         caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
1841         break;
1842       case DEPTH_OUTPUT:
1843         caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
1844         break;
1845       case CONSTRAINED_HIGH_SPEED_VIDEO:
1846         caps.add(
1847             ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
1848         break;
1849       default:
1850         // Ignore LEVELs
1851         break;
1852     }
1853   }
1854   ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, caps.array(),
1855                    caps.size());
1856 
1857   // Scan a default request template for included request keys
1858   Vector<int32_t> availableRequestKeys;
1859   const camera_metadata_t *previewRequest =
1860       constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
1861   for (size_t i = 0; i < get_camera_metadata_entry_count(previewRequest); i++) {
1862     camera_metadata_ro_entry_t entry;
1863     get_camera_metadata_ro_entry(previewRequest, i, &entry);
1864     availableRequestKeys.add(entry.tag);
1865   }
1866   ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
1867                    availableRequestKeys.array(), availableRequestKeys.size());
1868 
1869   // Add a few more result keys. Must be kept up to date with the various places
1870   // that add these
1871 
1872   Vector<int32_t> availableResultKeys(availableRequestKeys);
1873   if (hasCapability(BACKWARD_COMPATIBLE)) {
1874     availableResultKeys.add(ANDROID_CONTROL_AE_STATE);
1875     availableResultKeys.add(ANDROID_CONTROL_AF_STATE);
1876     availableResultKeys.add(ANDROID_CONTROL_AWB_STATE);
1877     availableResultKeys.add(ANDROID_FLASH_STATE);
1878     availableResultKeys.add(ANDROID_LENS_STATE);
1879     availableResultKeys.add(ANDROID_LENS_FOCUS_RANGE);
1880     availableResultKeys.add(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
1881     availableResultKeys.add(ANDROID_STATISTICS_SCENE_FLICKER);
1882   }
1883 
1884   if (hasCapability(DEPTH_OUTPUT)) {
1885     availableResultKeys.add(ANDROID_LENS_POSE_ROTATION);
1886     availableResultKeys.add(ANDROID_LENS_POSE_TRANSLATION);
1887     availableResultKeys.add(ANDROID_LENS_INTRINSIC_CALIBRATION);
1888     availableResultKeys.add(ANDROID_LENS_RADIAL_DISTORTION);
1889   }
1890 
1891   availableResultKeys.add(ANDROID_REQUEST_PIPELINE_DEPTH);
1892   availableResultKeys.add(ANDROID_SENSOR_TIMESTAMP);
1893 
1894   ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
1895                    availableResultKeys.array(), availableResultKeys.size());
1896 
1897   // Needs to be last, to collect all the keys set above
1898 
1899   availableCharacteristicsKeys.add(
1900       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
1901   info.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
1902               availableCharacteristicsKeys);
1903 
1904   mCameraInfo = info.release();
1905 
1906 #undef ADD_STATIC_ENTRY
1907   return OK;
1908 }
1909 
1910 status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) {
1911   /**
1912    * Extract top-level 3A controls
1913    */
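  // Overall flow: validate ANDROID_CONTROL_MODE / SCENE_MODE, run the fake
  // AE, AF and AWB simulations in turn, then publish the resulting 3A state
  // back into the settings via update3A().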
1914   status_t res;
1915 
1916   camera_metadata_entry e;
1917 
1918   e = settings.find(ANDROID_CONTROL_MODE);
1919   if (e.count == 0) {
1920     ALOGE("%s: No control mode entry!", __FUNCTION__);
1921     return BAD_VALUE;
1922   }
1923   uint8_t controlMode = e.data.u8[0];
1924 
1925   if (controlMode == ANDROID_CONTROL_MODE_OFF) {
1926     mAeMode = ANDROID_CONTROL_AE_MODE_OFF;
1927     mAfMode = ANDROID_CONTROL_AF_MODE_OFF;
1928     mAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
1929     mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
1930     mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
1931     mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
1932     update3A(settings);
1933     return OK;
1934   } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
1935     if (!hasCapability(BACKWARD_COMPATIBLE)) {
1936       ALOGE("%s: Can't use scene mode when BACKWARD_COMPATIBLE not supported!",
1937             __FUNCTION__);
1938       return BAD_VALUE;
1939     }
1940 
1941     e = settings.find(ANDROID_CONTROL_SCENE_MODE);
1942     if (e.count == 0) {
1943       ALOGE("%s: No scene mode entry!", __FUNCTION__);
1944       return BAD_VALUE;
1945     }
1946     uint8_t sceneMode = e.data.u8[0];
1947 
1948     switch (sceneMode) {
1949       case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
1950         mFacePriority = true;
1951         break;
1952       default:
1953         ALOGE("%s: Emulator doesn't support scene mode %d", __FUNCTION__,
1954               sceneMode);
1955         return BAD_VALUE;
1956     }
1957   } else {
1958     mFacePriority = false;
1959   }
1960 
1961   // controlMode == AUTO or sceneMode == FACE_PRIORITY
1962   // Process individual 3A controls
1963 
1964   res = doFakeAE(settings);
1965   if (res != OK) return res;
1966 
1967   res = doFakeAF(settings);
1968   if (res != OK) return res;
1969 
1970   res = doFakeAWB(settings);
1971   if (res != OK) return res;
1972 
1973   update3A(settings);
1974   return OK;
1975 }
1976 
1977 status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) {
1978   camera_metadata_entry e;
1979 
1980   e = settings.find(ANDROID_CONTROL_AE_MODE);
1981   if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
1982     ALOGE("%s: No AE mode entry!", __FUNCTION__);
1983     return BAD_VALUE;
1984   }
1985   uint8_t aeMode =
1986       (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AE_MODE_ON;
1987   mAeMode = aeMode;
1988 
1989   switch (aeMode) {
1990     case ANDROID_CONTROL_AE_MODE_OFF:
1991       // AE is OFF
1992       mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
1993       return OK;
1994     case ANDROID_CONTROL_AE_MODE_ON:
1995       // OK for AUTO modes
1996       break;
1997     default:
1998       // Mostly silently ignore unsupported modes
1999       ALOGV("%s: Emulator doesn't support AE mode %d, assuming ON",
2000             __FUNCTION__, aeMode);
2001       break;
2002   }
2003 
2004   e = settings.find(ANDROID_CONTROL_AE_LOCK);
2005   bool aeLocked =
2006       (e.count > 0) ? (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON) : false;
2007 
2008   e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
2009   bool precaptureTrigger = false;
2010   if (e.count != 0) {
2011     precaptureTrigger =
2012         (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
2013   }
2014 
2015   if (precaptureTrigger) {
2016     ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
2017   } else if (e.count > 0) {
2018     ALOGV("%s: Pre capture trigger was present? %zu", __FUNCTION__, e.count);
2019   }
2020 
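  // Simple fake AE model: during precapture the current exposure converges
  // exponentially toward the target (a kExposureTrackRate fraction of the
  // remaining gap per frame) and is declared done once it is within ~10% of
  // the target after at least kPrecaptureMinFrames frames. When unlocked and
  // stable for kStableAeMaxFrames frames, the target wanders by a random
  // factor of 2^x with x in [kExposureWanderMin, kExposureWanderMax] and AE
  // re-enters SEARCHING.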
2021   if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2022     // Run precapture sequence
2023     if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2024       mAeCounter = 0;
2025     }
2026 
2027     if (mFacePriority) {
2028       mAeTargetExposureTime = kFacePriorityExposureTime;
2029     } else {
2030       mAeTargetExposureTime = kNormalExposureTime;
2031     }
2032 
2033     if (mAeCounter > kPrecaptureMinFrames &&
2034         (mAeTargetExposureTime - mAeCurrentExposureTime) <
2035             mAeTargetExposureTime / 10) {
2036       // Done with precapture
2037       mAeCounter = 0;
2038       mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED
2039                           : ANDROID_CONTROL_AE_STATE_CONVERGED;
2040     } else {
2041       // Converge some more
2042       mAeCurrentExposureTime +=
2043           (mAeTargetExposureTime - mAeCurrentExposureTime) * kExposureTrackRate;
2044       mAeCounter++;
2045       mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
2046     }
2047 
2048   } else if (!aeLocked) {
2049     // Run standard occasional AE scan
2050     switch (mAeState) {
2051       case ANDROID_CONTROL_AE_STATE_CONVERGED:
2052       case ANDROID_CONTROL_AE_STATE_INACTIVE:
2053         mAeCounter++;
2054         if (mAeCounter > kStableAeMaxFrames) {
2055           mAeTargetExposureTime =
2056               mFacePriority ? kFacePriorityExposureTime : kNormalExposureTime;
2057           float exposureStep = ((double)rand() / RAND_MAX) *
2058                                    (kExposureWanderMax - kExposureWanderMin) +
2059                                kExposureWanderMin;
2060           mAeTargetExposureTime *= std::pow(2, exposureStep);
2061           mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
2062         }
2063         break;
2064       case ANDROID_CONTROL_AE_STATE_SEARCHING:
2065         mAeCurrentExposureTime +=
2066             (mAeTargetExposureTime - mAeCurrentExposureTime) *
2067             kExposureTrackRate;
2068         if (llabs(mAeTargetExposureTime - mAeCurrentExposureTime) <
2069             mAeTargetExposureTime / 10) {
2070           // Close enough
2071           mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2072           mAeCounter = 0;
2073         }
2074         break;
2075       case ANDROID_CONTROL_AE_STATE_LOCKED:
2076         mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2077         mAeCounter = 0;
2078         break;
2079       default:
2080         ALOGE("%s: Emulator in unexpected AE state %d", __FUNCTION__, mAeState);
2081         return INVALID_OPERATION;
2082     }
2083   } else {
2084     // AE is locked
2085     mAeState = ANDROID_CONTROL_AE_STATE_LOCKED;
2086   }
2087 
2088   return OK;
2089 }
2090 
2091 status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
2092   camera_metadata_entry e;
2093 
2094   e = settings.find(ANDROID_CONTROL_AF_MODE);
2095   if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
2096     ALOGE("%s: No AF mode entry!", __FUNCTION__);
2097     return BAD_VALUE;
2098   }
2099   uint8_t afMode =
2100       (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AF_MODE_OFF;
2101 
2102   e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
2103   typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
2104   af_trigger_t afTrigger;
2105   if (e.count != 0) {
2106     afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);
2107 
2108     ALOGV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
2109     ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
2110   } else {
2111     afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
2112   }
2113 
2114   switch (afMode) {
2115     case ANDROID_CONTROL_AF_MODE_OFF:
2116       mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
2117       return OK;
2118     case ANDROID_CONTROL_AF_MODE_AUTO:
2119     case ANDROID_CONTROL_AF_MODE_MACRO:
2120     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2121     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2122       if (!mFacingBack) {
2123         ALOGE("%s: Front camera doesn't support AF mode %d", __FUNCTION__,
2124               afMode);
2125         return BAD_VALUE;
2126       }
2127       // OK, handle the state transitions below
2128       break;
2129     default:
2130       ALOGE("%s: Emulator doesn't support AF mode %d", __FUNCTION__, afMode);
2131       return BAD_VALUE;
2132   }
2133 
2134   bool afModeChanged = mAfMode != afMode;
2135   mAfMode = afMode;
2136 
2137   /**
2138    * Simulate AF triggers. Transition at most 1 state per frame.
2139    * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
2140    */
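  // Rough sketch of the transitions implemented below:
  //   INACTIVE --trigger, AUTO/MACRO--------> ACTIVE_SCAN
  //   INACTIVE --no trigger, CONTINUOUS_*---> PASSIVE_SCAN
  //   PASSIVE_SCAN / PASSIVE_FOCUSED --trigger--> FOCUSED or NOT_FOCUSED_LOCKED
  //   ACTIVE_SCAN ---(completes instantly)--> FOCUSED or NOT_FOCUSED_LOCKED
  //   any state --CANCEL--> INACTIVE
  // "Focus success" is decided with rand(); roughly 2/3 of completed scans
  // report FOCUSED_LOCKED.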
2141 
2142   bool afTriggerStart = false;
2143   bool afTriggerCancel = false;
2144   switch (afTrigger) {
2145     case ANDROID_CONTROL_AF_TRIGGER_IDLE:
2146       break;
2147     case ANDROID_CONTROL_AF_TRIGGER_START:
2148       afTriggerStart = true;
2149       break;
2150     case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
2151       afTriggerCancel = true;
2152       // Cancel trigger always transitions into INACTIVE
2153       mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
2154 
2155       ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);
2156 
2157       // Stay in 'inactive' until at least next frame
2158       return OK;
2159     default:
2160       ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
2161       return BAD_VALUE;
2162   }
2163 
2164   // If we get down here, we're either in an autofocus mode
2165   //  or in a continuous focus mode (and no other modes)
2166 
2167   int oldAfState = mAfState;
2168   switch (mAfState) {
2169     case ANDROID_CONTROL_AF_STATE_INACTIVE:
2170       if (afTriggerStart) {
2171         switch (afMode) {
2172           case ANDROID_CONTROL_AF_MODE_AUTO:
2173             // fall-through
2174           case ANDROID_CONTROL_AF_MODE_MACRO:
2175             mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
2176             break;
2177           case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2178             // fall-through
2179           case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2180             mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2181             break;
2182         }
2183       } else {
2184         // At least one frame stays in INACTIVE
2185         if (!afModeChanged) {
2186           switch (afMode) {
2187             case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2188               // fall-through
2189             case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2190               mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
2191               break;
2192           }
2193         }
2194       }
2195       break;
2196     case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
2197       /**
2198        * When the AF trigger is activated, the algorithm should finish
2199        * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
2200        * or AF_NOT_FOCUSED as appropriate
2201        */
2202       if (afTriggerStart) {
2203         // Randomly transition to focused or not focused
2204         if (rand() % 3) {
2205           mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
2206         } else {
2207           mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2208         }
2209       }
2210       /**
2211        * When the AF trigger is not involved, the AF algorithm should
2212        * start in INACTIVE state, and then transition into PASSIVE_SCAN
2213        * and PASSIVE_FOCUSED states
2214        */
2215       else if (!afTriggerCancel) {
2216         // Randomly transition to passive focus
2217         if (rand() % 3 == 0) {
2218           mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
2219         }
2220       }
2221 
2222       break;
2223     case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
2224       if (afTriggerStart) {
2225         // Randomly transition to focused or not focused
2226         if (rand() % 3) {
2227           mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
2228         } else {
2229           mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2230         }
2231       }
2232       // TODO: initiate passive scan (PASSIVE_SCAN)
2233       break;
2234     case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
2235       // Simulate AF sweep completing instantaneously
2236 
2237       // Randomly transition to focused or not focused
2238       if (rand() % 3) {
2239         mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
2240       } else {
2241         mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2242       }
2243       break;
2244     case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
2245       if (afTriggerStart) {
2246         switch (afMode) {
2247           case ANDROID_CONTROL_AF_MODE_AUTO:
2248             // fall-through
2249           case ANDROID_CONTROL_AF_MODE_MACRO:
2250             mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
2251             break;
2252           case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2253             // fall-through
2254           case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2255             // continuous autofocus => trigger start has no effect
2256             break;
2257         }
2258       }
2259       break;
2260     case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
2261       if (afTriggerStart) {
2262         switch (afMode) {
2263           case ANDROID_CONTROL_AF_MODE_AUTO:
2264             // fall-through
2265           case ANDROID_CONTROL_AF_MODE_MACRO:
2266             mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
2267             break;
2268           case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2269             // fall-through
2270           case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2271             // continuous autofocus => trigger start has no effect
2272             break;
2273         }
2274       }
2275       break;
2276     default:
2277       ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
2278   }
2279 
2280   {
2281     char afStateString[100] = {
2282         0,
2283     };
2284     camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE, oldAfState,
2285                                  afStateString, sizeof(afStateString));
2286 
2287     char afNewStateString[100] = {
2288         0,
2289     };
2290     camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE, mAfState,
2291                                  afNewStateString, sizeof(afNewStateString));
2292     ALOGVV("%s: AF state transitioned from %s to %s", __FUNCTION__,
2293            afStateString, afNewStateString);
2294   }
2295 
2296   return OK;
2297 }
2298 
2299 status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) {
2300   camera_metadata_entry e;
2301 
2302   e = settings.find(ANDROID_CONTROL_AWB_MODE);
2303   if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
2304     ALOGE("%s: No AWB mode entry!", __FUNCTION__);
2305     return BAD_VALUE;
2306   }
2307   uint8_t awbMode =
2308       (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AWB_MODE_AUTO;
2309 
2310   // TODO: Add white balance simulation
2311 
2312   e = settings.find(ANDROID_CONTROL_AWB_LOCK);
2313   bool awbLocked =
2314       (e.count > 0) ? (e.data.u8[0] == ANDROID_CONTROL_AWB_LOCK_ON) : false;
2315 
2316   switch (awbMode) {
2317     case ANDROID_CONTROL_AWB_MODE_OFF:
2318       mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2319       break;
2320     case ANDROID_CONTROL_AWB_MODE_AUTO:
2321     case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
2322     case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
2323     case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
2324     case ANDROID_CONTROL_AWB_MODE_SHADE:
2325       // Always magically right, or locked
2326       mAwbState = awbLocked ? ANDROID_CONTROL_AWB_STATE_LOCKED
2327                             : ANDROID_CONTROL_AWB_STATE_CONVERGED;
2328       break;
2329     default:
2330       ALOGE("%s: Emulator doesn't support AWB mode %d", __FUNCTION__, awbMode);
2331       return BAD_VALUE;
2332   }
2333 
2334   return OK;
2335 }
2336 
2337 void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
2338   if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
2339     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &mAeCurrentExposureTime, 1);
2340     settings.update(ANDROID_SENSOR_SENSITIVITY, &mAeCurrentSensitivity, 1);
2341   }
2342 
2343   settings.update(ANDROID_CONTROL_AE_STATE, &mAeState, 1);
2344   settings.update(ANDROID_CONTROL_AF_STATE, &mAfState, 1);
2345   settings.update(ANDROID_CONTROL_AWB_STATE, &mAwbState, 1);
2346 
2347   uint8_t lensState;
2348   switch (mAfState) {
2349     case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
2350     case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
2351       lensState = ANDROID_LENS_STATE_MOVING;
2352       break;
2353     case ANDROID_CONTROL_AF_STATE_INACTIVE:
2354     case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
2355     case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
2356     case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
2357     case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
2358     default:
2359       lensState = ANDROID_LENS_STATE_STATIONARY;
2360       break;
2361   }
2362   settings.update(ANDROID_LENS_STATE, &lensState, 1);
2363 }
2364 
2365 void EmulatedFakeCamera3::signalReadoutIdle() {
2366   Mutex::Autolock l(mLock);
2367   // Need to check isIdle again because waiting on mLock may have allowed
2368   // something to be placed in the in-flight queue.
2369   if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
2370     ALOGV("Now idle");
2371     mStatus = STATUS_READY;
2372   }
2373 }
2374 
2375 void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e,
2376                                         nsecs_t timestamp) {
2377   switch (e) {
2378     case Sensor::SensorListener::EXPOSURE_START: {
2379       ALOGVV("%s: Frame %d: Sensor started exposure at %lld", __FUNCTION__,
2380              frameNumber, timestamp);
2381       // Trigger shutter notify to framework
2382       camera3_notify_msg_t msg;
2383       msg.type = CAMERA3_MSG_SHUTTER;
2384       msg.message.shutter.frame_number = frameNumber;
2385       msg.message.shutter.timestamp = timestamp;
2386       sendNotify(&msg);
2387       break;
2388     }
2389     default:
2390       ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__, e,
2391             timestamp);
2392       break;
2393   }
2394 }
2395 
2396 EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent)
2397     : mParent(parent), mJpegWaiting(false) {}
2398 
2399 EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() {
2400   for (List<Request>::iterator i = mInFlightQueue.begin();
2401        i != mInFlightQueue.end(); i++) {
2402     delete i->buffers;
2403     delete i->sensorBuffers;
2404   }
2405 }
2406 
2407 void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
2408   Mutex::Autolock l(mLock);
2409 
2410   mInFlightQueue.push_back(r);
2411   mInFlightSignal.signal();
2412 }
2413 
2414 bool EmulatedFakeCamera3::ReadoutThread::isIdle() {
2415   Mutex::Autolock l(mLock);
2416   return mInFlightQueue.empty() && !mThreadActive;
2417 }
2418 
2419 status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() {
2420   status_t res;
2421   Mutex::Autolock l(mLock);
2422   int loopCount = 0;
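  // Blocks while the in-flight queue is full; gives up after roughly
  // kMaxWaitLoops * kWaitPerLoop (constants assumed to be defined alongside
  // this class).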
2423   while (mInFlightQueue.size() >= kMaxQueueSize) {
2424     res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2425     if (res != OK && res != TIMED_OUT) {
2426       ALOGE("%s: Error waiting for in-flight queue to shrink", __FUNCTION__);
2427       return INVALID_OPERATION;
2428     }
2429     if (loopCount == kMaxWaitLoops) {
2430       ALOGE("%s: Timed out waiting for in-flight queue to shrink",
2431             __FUNCTION__);
2432       return TIMED_OUT;
2433     }
2434     loopCount++;
2435   }
2436   return OK;
2437 }
2438 
2439 bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
2440   status_t res;
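  // Each pass: (1) pull the next request off the in-flight queue, (2) wait
  // for the sensor to finish capturing the frame, (3) hand any JPEG (BLOB)
  // buffer to the asynchronous compressor, and (4) fill in the remaining
  // result metadata and send the capture result to the framework.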
2441 
2442   ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);
2443 
2444   // First wait for a request from the in-flight queue
2445 
2446   if (mCurrentRequest.settings.isEmpty()) {
2447     Mutex::Autolock l(mLock);
2448     if (mInFlightQueue.empty()) {
2449       res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2450       if (res == TIMED_OUT) {
2451         ALOGVV("%s: ReadoutThread: Timed out waiting for request",
2452                __FUNCTION__);
2453         return true;
2454       } else if (res != NO_ERROR) {
2455         ALOGE("%s: Error waiting for capture requests: %d", __FUNCTION__, res);
2456         return false;
2457       }
2458     }
2459     mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
2460     mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
2461     mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
2462     mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
2463     mInFlightQueue.erase(mInFlightQueue.begin());
2464     mInFlightSignal.signal();
2465     mThreadActive = true;
2466     ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
2467            mCurrentRequest.frameNumber);
2468   }
2469 
2470   // Then wait for it to be delivered from the sensor
2471   ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
2472          __FUNCTION__);
2473 
2474   nsecs_t captureTime;
2475   bool gotFrame = mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
2476   if (!gotFrame) {
2477     ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
2478            __FUNCTION__);
2479     return true;
2480   }
2481 
2482   ALOGVV("Sensor done with readout for frame %d, captured at %lld ",
2483          mCurrentRequest.frameNumber, captureTime);
2484 
2485   // Check if we need to JPEG encode a buffer, and send it for async
2486   // compression if so. Otherwise prepare the buffer for return.
2487   bool needJpeg = false;
2488   HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
2489   while (buf != mCurrentRequest.buffers->end()) {
2490     bool goodBuffer = true;
2491     if (buf->stream->format == HAL_PIXEL_FORMAT_BLOB &&
2492         buf->stream->data_space != HAL_DATASPACE_DEPTH) {
2493       Mutex::Autolock jl(mJpegLock);
2494       if (mJpegWaiting) {
2495         // This shouldn't happen, because processCaptureRequest should
2496         // stall until the JPEG compressor is free.
2497         ALOGE("%s: Already processing a JPEG!", __FUNCTION__);
2498         goodBuffer = false;
2499       }
2500       if (goodBuffer) {
2501         // Compressor takes ownership of sensorBuffers here
2502         res = mParent->mJpegCompressor->start(mCurrentRequest.sensorBuffers,
2503                                               this);
2504         goodBuffer = (res == OK);
2505       }
2506       if (goodBuffer) {
2507         needJpeg = true;
2508 
2509         mJpegHalBuffer = *buf;
2510         mJpegFrameNumber = mCurrentRequest.frameNumber;
2511         mJpegWaiting = true;
2512 
2513         mCurrentRequest.sensorBuffers = NULL;
2514         buf = mCurrentRequest.buffers->erase(buf);
2515 
2516         continue;
2517       }
2518       ALOGE("%s: Error compressing output buffer: %s (%d)", __FUNCTION__,
2519             strerror(-res), res);
2520       // fallthrough for cleanup
2521     }
2522     GrallocModule::getInstance().unlock(*(buf->buffer));
2523 
2524     buf->status =
2525         goodBuffer ? CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
2526     buf->acquire_fence = -1;
2527     buf->release_fence = -1;
2528 
2529     ++buf;
2530   }  // end while
2531 
2532   // Construct result for all completed buffers and results
2533 
2534   camera3_capture_result result;
2535 
2536   if (mParent->hasCapability(BACKWARD_COMPATIBLE)) {
2537     static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
2538     mCurrentRequest.settings.update(ANDROID_STATISTICS_SCENE_FLICKER,
2539                                     &sceneFlicker, 1);
2540 
2541     static const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
2542     mCurrentRequest.settings.update(ANDROID_FLASH_STATE, &flashState, 1);
2543 
2544     nsecs_t rollingShutterSkew = Sensor::kFrameDurationRange[0];
2545     mCurrentRequest.settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
2546                                     &rollingShutterSkew, 1);
2547 
2548     float focusRange[] = {1.0f / 5.0f, 0};  // 5 m to infinity in focus
2549     mCurrentRequest.settings.update(ANDROID_LENS_FOCUS_RANGE, focusRange,
2550                                     sizeof(focusRange) / sizeof(float));
2551   }
2552 
2553   if (mParent->hasCapability(DEPTH_OUTPUT)) {
2554     camera_metadata_entry_t entry;
2555 
2556     find_camera_metadata_entry(mParent->mCameraInfo,
2557                                ANDROID_LENS_POSE_TRANSLATION, &entry);
2558     mCurrentRequest.settings.update(ANDROID_LENS_POSE_TRANSLATION, entry.data.f,
2559                                     entry.count);
2560 
2561     find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_POSE_ROTATION,
2562                                &entry);
2563     mCurrentRequest.settings.update(ANDROID_LENS_POSE_ROTATION, entry.data.f,
2564                                     entry.count);
2565 
2566     find_camera_metadata_entry(mParent->mCameraInfo,
2567                                ANDROID_LENS_INTRINSIC_CALIBRATION, &entry);
2568     mCurrentRequest.settings.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
2569                                     entry.data.f, entry.count);
2570 
2571     find_camera_metadata_entry(mParent->mCameraInfo,
2572                                ANDROID_LENS_RADIAL_DISTORTION, &entry);
2573     mCurrentRequest.settings.update(ANDROID_LENS_RADIAL_DISTORTION,
2574                                     entry.data.f, entry.count);
2575   }
2576 
2577   mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP, &captureTime, 1);
2578 
2579   // JPEGs take a stage longer
2580   const uint8_t pipelineDepth =
2581       needJpeg ? kMaxBufferCount : kMaxBufferCount - 1;
2582   mCurrentRequest.settings.update(ANDROID_REQUEST_PIPELINE_DEPTH,
2583                                   &pipelineDepth, 1);
2584 
2585   result.frame_number = mCurrentRequest.frameNumber;
2586   result.result = mCurrentRequest.settings.getAndLock();
2587   result.num_output_buffers = mCurrentRequest.buffers->size();
2588   result.output_buffers = mCurrentRequest.buffers->array();
2589   result.input_buffer = nullptr;
2590   result.partial_result = 1;
2591 
2592   // Go idle if queue is empty, before sending result
2593   bool signalIdle = false;
2594   {
2595     Mutex::Autolock l(mLock);
2596     if (mInFlightQueue.empty()) {
2597       mThreadActive = false;
2598       signalIdle = true;
2599     }
2600   }
2601   if (signalIdle) mParent->signalReadoutIdle();
2602 
2603   // Send it off to the framework
2604   ALOGVV("%s: ReadoutThread: Send result to framework", __FUNCTION__);
2605   mParent->sendCaptureResult(&result);
2606 
2607   // Clean up
2608   mCurrentRequest.settings.unlock(result.result);
2609 
2610   delete mCurrentRequest.buffers;
2611   mCurrentRequest.buffers = NULL;
2612   if (!needJpeg) {
2613     delete mCurrentRequest.sensorBuffers;
2614     mCurrentRequest.sensorBuffers = NULL;
2615   }
2616   mCurrentRequest.settings.clear();
2617 
2618   return true;
2619 }
2620 
2621 void EmulatedFakeCamera3::ReadoutThread::onJpegDone(
2622     const StreamBuffer &jpegBuffer, bool success) {
2623   Mutex::Autolock jl(mJpegLock);
2624 
2625   GrallocModule::getInstance().unlock(*(jpegBuffer.buffer));
2626 
2627   mJpegHalBuffer.status =
2628       success ? CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
2629   mJpegHalBuffer.acquire_fence = -1;
2630   mJpegHalBuffer.release_fence = -1;
2631   mJpegWaiting = false;
2632 
2633   camera3_capture_result result;
2634 
2635   result.frame_number = mJpegFrameNumber;
2636   result.result = NULL;
2637   result.num_output_buffers = 1;
2638   result.output_buffers = &mJpegHalBuffer;
2639   result.input_buffer = nullptr;
2640   result.partial_result = 0;
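  // result.result stays NULL and partial_result is 0 here because this late
  // result only returns the JPEG output buffer; the frame's metadata was
  // already sent from threadLoop().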
2641 
2642   if (!success) {
2643     ALOGE(
2644         "%s: Compression failure, returning error state buffer to"
2645         " framework",
2646         __FUNCTION__);
2647   } else {
2648     ALOGV("%s: Compression complete, returning buffer to framework",
2649           __FUNCTION__);
2650   }
2651 
2652   mParent->sendCaptureResult(&result);
2653 }
2654 
2655 void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone(
2656     const StreamBuffer & /*inputBuffer*/) {
2657   // Should never get here, since the input buffer has to be returned
2658   // by the end of processCaptureRequest
2659   ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
2660 }
2661 
2662 };  // namespace android
2663