• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2013 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 /*
18  * Contains implementation of a class EmulatedFakeCamera3 that encapsulates
19  * functionality of an advanced fake camera.
20  */
21 
22 #include <cstdint>
23 #include <inttypes.h>
24 
25 //#define LOG_NDEBUG 0
26 //#define LOG_NNDEBUG 0
27 #define LOG_TAG "EmulatedCamera_FakeCamera3"
28 #include <cutils/properties.h>
29 #include <utils/Log.h>
30 
31 #include <ui/Fence.h>
32 #include "EmulatedCameraFactory.h"
33 #include "EmulatedFakeCamera3.h"
34 #include "GrallocModule.h"
35 
36 #include <cmath>
37 #include "fake-pipeline2/JpegCompressor.h"
38 #include "fake-pipeline2/Sensor.h"
39 
40 #include <vector>
41 
42 #if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
43 #define ALOGVV ALOGV
44 #else
45 #define ALOGVV(...) ((void)0)
46 #endif
47 
48 namespace android {
49 
50 /**
51  * Constants for camera capabilities
52  */
53 
// Time-unit multipliers expressed in the base unit of nsecs_t (nanoseconds):
// USEC = nanoseconds per microsecond, MSEC = nanoseconds per millisecond.
const int64_t USEC = 1000LL;
const int64_t MSEC = USEC * 1000LL;
// const int64_t SEC = MSEC * 1000LL;

// Pixel formats accepted for streams; configureStreams() rejects any stream
// whose format is not in this list.
const int32_t EmulatedFakeCamera3::kAvailableFormats[] = {
    HAL_PIXEL_FORMAT_RAW16, HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_RGBA_8888,
    HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
    // These are handled by YCbCr_420_888
    //        HAL_PIXEL_FORMAT_YV12,
    //        HAL_PIXEL_FORMAT_YCrCb_420_SP,
    HAL_PIXEL_FORMAT_YCbCr_420_888, HAL_PIXEL_FORMAT_Y16};

/**
 * 3A constants
 */

// Default exposure and gain targets for different scenarios
const nsecs_t EmulatedFakeCamera3::kNormalExposureTime = 10 * MSEC;
const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
const int EmulatedFakeCamera3::kNormalSensitivity = 100;
const int EmulatedFakeCamera3::kFacePrioritySensitivity = 400;
// Fraction of the exposure delta applied per frame by the fake AE loop.
const float EmulatedFakeCamera3::kExposureTrackRate = 0.1;
const int EmulatedFakeCamera3::kPrecaptureMinFrames = 10;
const int EmulatedFakeCamera3::kStableAeMaxFrames = 100;
// Bounds (in stops) for the simulated AE wander.
const float EmulatedFakeCamera3::kExposureWanderMin = -2;
const float EmulatedFakeCamera3::kExposureWanderMax = 1;
80 
81 /**
82  * Camera device lifecycle methods
83  */
84 
EmulatedFakeCamera3(int cameraId,bool facingBack,struct hw_module_t * module)85 EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, bool facingBack,
86                                          struct hw_module_t *module)
87     : EmulatedCamera3(cameraId, module), mFacingBack(facingBack) {
88   ALOGI("Constructing emulated fake camera 3: ID %d, facing %s", mCameraID,
89         facingBack ? "back" : "front");
90 
91   for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
92     mDefaultTemplates[i] = NULL;
93   }
94 }
95 
~EmulatedFakeCamera3()96 EmulatedFakeCamera3::~EmulatedFakeCamera3() {
97   for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
98     if (mDefaultTemplates[i] != NULL) {
99       free_camera_metadata(mDefaultTemplates[i]);
100     }
101   }
102 }
103 
Initialize(const cvd::CameraDefinition & params)104 status_t EmulatedFakeCamera3::Initialize(const cvd::CameraDefinition &params) {
105   ALOGV("%s: E", __FUNCTION__);
106   status_t res;
107 
108   if (mStatus != STATUS_ERROR) {
109     ALOGE("%s: Already initialized!", __FUNCTION__);
110     return INVALID_OPERATION;
111   }
112 
113   res = getCameraCapabilities();
114   if (res != OK) {
115     ALOGE("%s: Unable to get camera capabilities: %s (%d)", __FUNCTION__,
116           strerror(-res), res);
117     return res;
118   }
119 
120   res = constructStaticInfo(params);
121   if (res != OK) {
122     ALOGE("%s: Unable to allocate static info: %s (%d)", __FUNCTION__,
123           strerror(-res), res);
124     return res;
125   }
126 
127   return EmulatedCamera3::Initialize(params);
128 }
129 
/*
 * Open the device: bring up the fake sensor and the readout thread, reset the
 * fake 3A state machine to its defaults, then let the base class publish the
 * hw_device_t. Only legal from STATUS_CLOSED.
 */
status_t EmulatedFakeCamera3::connectCamera(hw_device_t **device) {
  ALOGV("%s: E", __FUNCTION__);
  Mutex::Autolock l(mLock);
  status_t res;

  if (mStatus != STATUS_CLOSED) {
    ALOGE("%s: Can't connect in state %d", __FUNCTION__, mStatus);
    return INVALID_OPERATION;
  }

  // The sensor delivers frame events back to us via the listener interface.
  mSensor = new Sensor(mSensorWidth, mSensorHeight);
  mSensor->setSensorListener(this);

  // Sensor must be running before the readout thread starts consuming it.
  res = mSensor->startUp();
  if (res != NO_ERROR) return res;

  mReadoutThread = new ReadoutThread(this);
  mJpegCompressor = new JpegCompressor();

  res = mReadoutThread->run("EmuCam3::readoutThread");
  if (res != NO_ERROR) return res;

  // Initialize fake 3A

  mControlMode = ANDROID_CONTROL_MODE_AUTO;
  mFacePriority = false;
  mAeMode = ANDROID_CONTROL_AE_MODE_ON;
  mAfMode = ANDROID_CONTROL_AF_MODE_AUTO;
  mAwbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
  mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
  mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
  mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
  mAeCounter = 0;
  mAeTargetExposureTime = kNormalExposureTime;
  mAeCurrentExposureTime = kNormalExposureTime;
  mAeCurrentSensitivity = kNormalSensitivity;

  return EmulatedCamera3::connectCamera(device);
}
169 
/*
 * Close the device: shut down the sensor, stop and join the readout thread,
 * then free per-stream private state. Idempotent when already closed.
 */
status_t EmulatedFakeCamera3::closeCamera() {
  ALOGV("%s: E", __FUNCTION__);
  status_t res;
  {
    Mutex::Autolock l(mLock);
    if (mStatus == STATUS_CLOSED) return OK;

    res = mSensor->shutDown();
    if (res != NO_ERROR) {
      ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
      return res;
    }
    mSensor.clear();

    mReadoutThread->requestExit();
  }

  // join() is deliberately done outside the mLock scope above.
  // NOTE(review): presumably the readout thread can take mLock while
  // draining, so joining under the lock would risk deadlock — confirm.
  mReadoutThread->join();

  {
    Mutex::Autolock l(mLock);
    // Clear out private stream information
    for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
      PrivateStreamInfo *privStream =
          static_cast<PrivateStreamInfo *>((*s)->priv);
      delete privStream;
      (*s)->priv = NULL;
    }
    mStreams.clear();
    mReadoutThread.clear();
  }

  return EmulatedCamera3::closeCamera();
}
204 
getCameraInfo(struct camera_info * info)205 status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) {
206   info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
207   info->orientation =
208       EmulatedCameraFactory::Instance().getFakeCameraOrientation();
209   info->resource_cost = 100;
210   info->conflicting_devices = NULL;
211   info->conflicting_devices_length = 0;
212   return EmulatedCamera3::getCameraInfo(info);
213 }
214 
setTorchMode(bool enabled)215 status_t EmulatedFakeCamera3::setTorchMode(bool enabled) {
216   if (!mFacingBack) {
217     ALOGE("%s: Front camera does not have flash unit", __FUNCTION__);
218     return INVALID_OPERATION;
219   }
220   EmulatedCameraFactory::Instance().onTorchModeStatusChanged(
221       mCameraID, enabled ? TORCH_MODE_STATUS_AVAILABLE_ON
222                          : TORCH_MODE_STATUS_AVAILABLE_OFF);
223   return NO_ERROR;
224 }
225 
226 /**
227  * Camera3 interface methods
228  */
229 
configureStreams(camera3_stream_configuration * streamList)230 status_t EmulatedFakeCamera3::configureStreams(
231     camera3_stream_configuration *streamList) {
232   Mutex::Autolock l(mLock);
233   ALOGV("%s: %d streams", __FUNCTION__, streamList->num_streams);
234 
235   if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
236     ALOGE("%s: Cannot configure streams in state %d", __FUNCTION__, mStatus);
237     return NO_INIT;
238   }
239 
240   /**
241    * Sanity-check input list.
242    */
243   if (streamList == NULL) {
244     ALOGE("%s: NULL stream configuration", __FUNCTION__);
245     return BAD_VALUE;
246   }
247 
248   if (streamList->streams == NULL) {
249     ALOGE("%s: NULL stream list", __FUNCTION__);
250     return BAD_VALUE;
251   }
252 
253   if (streamList->num_streams < 1) {
254     ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
255           streamList->num_streams);
256     return BAD_VALUE;
257   }
258 
259   camera3_stream_t *inputStream = NULL;
260   for (size_t i = 0; i < streamList->num_streams; i++) {
261     camera3_stream_t *newStream = streamList->streams[i];
262 
263     if (newStream == NULL) {
264       ALOGE("%s: Stream index %zu was NULL", __FUNCTION__, i);
265       return BAD_VALUE;
266     }
267 
268     ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x",
269           __FUNCTION__, newStream, i, newStream->stream_type, newStream->usage,
270           newStream->format);
271 
272     if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
273         newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
274       if (inputStream != NULL) {
275         ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
276         return BAD_VALUE;
277       }
278       inputStream = newStream;
279     }
280 
281     bool validFormat = false;
282     for (size_t f = 0;
283          f < sizeof(kAvailableFormats) / sizeof(kAvailableFormats[0]); f++) {
284       if (newStream->format == kAvailableFormats[f]) {
285         validFormat = true;
286         break;
287       }
288     }
289     if (!validFormat) {
290       ALOGE("%s: Unsupported stream format 0x%x requested", __FUNCTION__,
291             newStream->format);
292       return BAD_VALUE;
293     }
294   }
295   mInputStream = inputStream;
296 
297   /**
298    * Initially mark all existing streams as not alive
299    */
300   for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
301     PrivateStreamInfo *privStream =
302         static_cast<PrivateStreamInfo *>((*s)->priv);
303     privStream->alive = false;
304   }
305 
306   /**
307    * Find new streams and mark still-alive ones
308    */
309   for (size_t i = 0; i < streamList->num_streams; i++) {
310     camera3_stream_t *newStream = streamList->streams[i];
311     if (newStream->priv == NULL) {
312       // New stream, construct info
313       PrivateStreamInfo *privStream = new PrivateStreamInfo();
314       privStream->alive = true;
315 
316       newStream->max_buffers = kMaxBufferCount;
317       newStream->priv = privStream;
318       mStreams.push_back(newStream);
319     } else {
320       // Existing stream, mark as still alive.
321       PrivateStreamInfo *privStream =
322           static_cast<PrivateStreamInfo *>(newStream->priv);
323       privStream->alive = true;
324     }
325     // Always update usage and max buffers
326     newStream->max_buffers = kMaxBufferCount;
327     switch (newStream->stream_type) {
328       case CAMERA3_STREAM_OUTPUT:
329         newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
330         break;
331       case CAMERA3_STREAM_INPUT:
332         newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
333         break;
334       case CAMERA3_STREAM_BIDIRECTIONAL:
335         newStream->usage =
336             GRALLOC_USAGE_HW_CAMERA_READ | GRALLOC_USAGE_HW_CAMERA_WRITE;
337         break;
338     }
339   }
340 
341   /**
342    * Reap the dead streams
343    */
344   for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
345     PrivateStreamInfo *privStream =
346         static_cast<PrivateStreamInfo *>((*s)->priv);
347     if (!privStream->alive) {
348       (*s)->priv = NULL;
349       delete privStream;
350       s = mStreams.erase(s);
351     } else {
352       ++s;
353     }
354   }
355 
356   /**
357    * Can't reuse settings across configure call
358    */
359   mPrevSettings.clear();
360 
361   return OK;
362 }
363 
/*
 * Deprecated entry point: buffer pre-registration was removed in camera HAL
 * 3.2 (buffers arrive per-request instead), so this always fails.
 */
status_t EmulatedFakeCamera3::registerStreamBuffers(
    const camera3_stream_buffer_set * /*bufferSet*/) {
  ALOGV("%s: E", __FUNCTION__);
  Mutex::Autolock l(mLock);

  // Should not be called in HAL versions >= 3.2

  ALOGE("%s: Should not be invoked on new HALs!", __FUNCTION__);
  return NO_INIT;
}
374 
/*
 * Build (and cache) the default request settings for template |type| (one of
 * the CAMERA3_TEMPLATE_* values). Returns NULL for an out-of-range template,
 * or for any non-preview template on devices lacking BACKWARD_COMPATIBLE.
 * Which metadata sections are populated depends on the advertised
 * capabilities (BACKWARD_COMPATIBLE, MANUAL_SENSOR, MANUAL_POST_PROCESSING).
 */
const camera_metadata_t *EmulatedFakeCamera3::constructDefaultRequestSettings(
    int type) {
  ALOGV("%s: E", __FUNCTION__);
  Mutex::Autolock l(mLock);

  if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
    ALOGE("%s: Unknown request settings template: %d", __FUNCTION__, type);
    return NULL;
  }

  if (!hasCapability(BACKWARD_COMPATIBLE) && type != CAMERA3_TEMPLATE_PREVIEW) {
    ALOGE("%s: Template %d not supported w/o BACKWARD_COMPATIBLE capability",
          __FUNCTION__, type);
    return NULL;
  }

  /**
   * Cache is not just an optimization - pointer returned has to live at
   * least as long as the camera device instance does.
   */
  if (mDefaultTemplates[type] != NULL) {
    return mDefaultTemplates[type];
  }

  CameraMetadata settings;

  /** android.request */

  static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
  settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);

  static const int32_t id = 0;
  settings.update(ANDROID_REQUEST_ID, &id, 1);

  static const int32_t frameCount = 0;
  settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);

  /** android.lens */

  static const float focalLength = 5.0f;
  settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);

  if (hasCapability(BACKWARD_COMPATIBLE)) {
    static const float focusDistance = 0;
    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);

    static const float aperture = 2.8f;
    settings.update(ANDROID_LENS_APERTURE, &aperture, 1);

    static const float filterDensity = 0;
    settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);

    static const uint8_t opticalStabilizationMode =
        ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
                    &opticalStabilizationMode, 1);

    // FOCUS_RANGE set only in frame
  }

  /** android.sensor */

  if (hasCapability(MANUAL_SENSOR)) {
    static const int64_t exposureTime = 10 * MSEC;
    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);

    static const int64_t frameDuration = 33333333L;  // 1/30 s
    settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);

    static const int32_t sensitivity = 100;
    settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
  }

  // TIMESTAMP set only in frame

  /** android.flash */

  if (hasCapability(BACKWARD_COMPATIBLE)) {
    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);

    static const uint8_t flashPower = 10;
    settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);

    static const int64_t firingTime = 0;
    settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
  }

  /** Processing block modes */
  // Still-capture-style templates (still/video-snapshot/ZSL) get
  // HIGH_QUALITY processing; preview/video-style templates get FAST.
  if (hasCapability(MANUAL_POST_PROCESSING)) {
    uint8_t hotPixelMode = 0;
    uint8_t demosaicMode = 0;
    uint8_t noiseMode = 0;
    uint8_t shadingMode = 0;
    uint8_t colorMode = 0;
    uint8_t tonemapMode = 0;
    uint8_t edgeMode = 0;
    switch (type) {
      case CAMERA3_TEMPLATE_STILL_CAPTURE:
        // fall-through
      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
        // fall-through
      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
        hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
        demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
        noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
        shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
        colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
        tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
        edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
        break;
      case CAMERA3_TEMPLATE_PREVIEW:
        // fall-through
      case CAMERA3_TEMPLATE_VIDEO_RECORD:
        // fall-through
      default:
        hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
        demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
        noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        shadingMode = ANDROID_SHADING_MODE_FAST;
        colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
        tonemapMode = ANDROID_TONEMAP_MODE_FAST;
        edgeMode = ANDROID_EDGE_MODE_FAST;
        break;
    }
    settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
    settings.update(ANDROID_SHADING_MODE, &shadingMode, 1);
    settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
    settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
    settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
  }

  /** android.colorCorrection */

  if (hasCapability(MANUAL_POST_PROCESSING)) {
    // Identity 3x3 color transform (row-major rationals).
    static const camera_metadata_rational colorTransform[9] = {
        {1, 1}, {0, 1}, {0, 1}, {0, 1}, {1, 1}, {0, 1}, {0, 1}, {0, 1}, {1, 1}};
    settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);

    static const float colorGains[4] = {1.0f, 1.0f, 1.0f, 1.0f};
    settings.update(ANDROID_COLOR_CORRECTION_GAINS, colorGains, 4);
  }

  /** android.tonemap */

  if (hasCapability(MANUAL_POST_PROCESSING)) {
    // Linear tonemap curve: (0,0) -> (1,1) for all three channels.
    static const float tonemapCurve[4] = {0.f, 0.f, 1.f, 1.f};
    settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
    settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
    settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
  }

  /** android.scaler */
  if (hasCapability(BACKWARD_COMPATIBLE)) {
    // Default crop is the full active sensor array.
    static const int32_t cropRegion[4] = {0, 0, mSensorWidth, mSensorHeight};
    settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
  }

  /** android.jpeg */
  if (hasCapability(BACKWARD_COMPATIBLE)) {
    static const uint8_t jpegQuality = 80;
    settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);

    static const int32_t thumbnailSize[2] = {640, 480};
    settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);

    static const uint8_t thumbnailQuality = 80;
    settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);

    static const double gpsCoordinates[2] = {0, 0};
    settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2);

    static const uint8_t gpsProcessingMethod[32] = "None";
    settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod,
                    32);

    static const int64_t gpsTimestamp = 0;
    settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);

    static const int32_t jpegOrientation = 0;
    settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
  }

  /** android.stats */

  if (hasCapability(BACKWARD_COMPATIBLE)) {
    static const uint8_t faceDetectMode =
        ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);

    static const uint8_t hotPixelMapMode =
        ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
  }

  // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
  // sharpnessMap only in frames

  /** android.control */

  // Capture intent mirrors the requested template type.
  uint8_t controlIntent = 0;
  switch (type) {
    case CAMERA3_TEMPLATE_PREVIEW:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
      break;
    case CAMERA3_TEMPLATE_STILL_CAPTURE:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
      break;
    case CAMERA3_TEMPLATE_VIDEO_RECORD:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
      break;
    case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
      break;
    case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
      break;
    case CAMERA3_TEMPLATE_MANUAL:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
      break;
    default:
      controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
      break;
  }
  settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);

  // The MANUAL template disables the 3A block entirely.
  const uint8_t controlMode = (type == CAMERA3_TEMPLATE_MANUAL)
                                  ? ANDROID_CONTROL_MODE_OFF
                                  : ANDROID_CONTROL_MODE_AUTO;
  settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);

  // Video templates pin the lower FPS bound to 30 for steady frame rate.
  int32_t aeTargetFpsRange[2] = {5, 30};
  if (type == CAMERA3_TEMPLATE_VIDEO_RECORD ||
      type == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT) {
    aeTargetFpsRange[0] = 30;
  }
  settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);

  if (hasCapability(BACKWARD_COMPATIBLE)) {
    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

    const uint8_t aeMode = (type == CAMERA3_TEMPLATE_MANUAL)
                               ? ANDROID_CONTROL_AE_MODE_OFF
                               : ANDROID_CONTROL_AE_MODE_ON;
    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);

    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);

    // All-zero region = "no region specified"; reused for AF below.
    static const int32_t controlRegions[5] = {0, 0, 0, 0, 0};
    settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);

    static const int32_t aeExpCompensation = 0;
    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
                    &aeExpCompensation, 1);

    static const uint8_t aeAntibandingMode =
        ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);

    static const uint8_t aePrecaptureTrigger =
        ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger,
                    1);

    const uint8_t awbMode = (type == CAMERA3_TEMPLATE_MANUAL)
                                ? ANDROID_CONTROL_AWB_MODE_OFF
                                : ANDROID_CONTROL_AWB_MODE_AUTO;
    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

    // AF mode is chosen per template; front camera has no AF hardware, so
    // it is always OFF there.
    uint8_t afMode = 0;

    if (mFacingBack) {
      switch (type) {
        case CAMERA3_TEMPLATE_PREVIEW:
          afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
          break;
        case CAMERA3_TEMPLATE_STILL_CAPTURE:
          afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
          break;
        case CAMERA3_TEMPLATE_VIDEO_RECORD:
          afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
          break;
        case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
          afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
          break;
        case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
          afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
          break;
        case CAMERA3_TEMPLATE_MANUAL:
          afMode = ANDROID_CONTROL_AF_MODE_OFF;
          break;
        default:
          afMode = ANDROID_CONTROL_AF_MODE_AUTO;
          break;
      }
    } else {
      afMode = ANDROID_CONTROL_AF_MODE_OFF;
    }
    settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);

    settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);

    static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);

    static const uint8_t vstabMode =
        ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);

    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);

    static const uint8_t lensShadingMapMode =
        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
                    &lensShadingMapMode, 1);

    static const uint8_t aberrationMode =
        ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode,
                    1);

    static const int32_t testPatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternMode, 1);
  }

  // Transfer ownership of the built metadata into the cache; the cached
  // pointer is what callers receive on subsequent requests for this type.
  mDefaultTemplates[type] = settings.release();

  return mDefaultTemplates[type];
}
715 
processCaptureRequest(camera3_capture_request * request)716 status_t EmulatedFakeCamera3::processCaptureRequest(
717     camera3_capture_request *request) {
718   Mutex::Autolock l(mLock);
719   status_t res;
720 
721   /** Validation */
722 
723   if (mStatus < STATUS_READY) {
724     ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
725           mStatus);
726     return INVALID_OPERATION;
727   }
728 
729   if (request == NULL) {
730     ALOGE("%s: NULL request!", __FUNCTION__);
731     return BAD_VALUE;
732   }
733 
734   uint32_t frameNumber = request->frame_number;
735 
736   if (request->settings == NULL && mPrevSettings.isEmpty()) {
737     ALOGE(
738         "%s: Request %d: NULL settings for first request after"
739         "configureStreams()",
740         __FUNCTION__, frameNumber);
741     return BAD_VALUE;
742   }
743 
744   if (request->input_buffer != NULL &&
745       request->input_buffer->stream != mInputStream) {
746     ALOGE("%s: Request %d: Input buffer not from input stream!", __FUNCTION__,
747           frameNumber);
748     ALOGV("%s: Bad stream %p, expected: %p", __FUNCTION__,
749           request->input_buffer->stream, mInputStream);
750     ALOGV("%s: Bad stream type %d, expected stream type %d", __FUNCTION__,
751           request->input_buffer->stream->stream_type,
752           mInputStream ? mInputStream->stream_type : -1);
753 
754     return BAD_VALUE;
755   }
756 
757   if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
758     ALOGE("%s: Request %d: No output buffers provided!", __FUNCTION__,
759           frameNumber);
760     return BAD_VALUE;
761   }
762 
763   // Validate all buffers, starting with input buffer if it's given
764 
765   ssize_t idx;
766   const camera3_stream_buffer_t *b;
767   if (request->input_buffer != NULL) {
768     idx = -1;
769     b = request->input_buffer;
770   } else {
771     idx = 0;
772     b = request->output_buffers;
773   }
774   do {
775     PrivateStreamInfo *priv = static_cast<PrivateStreamInfo *>(b->stream->priv);
776     if (priv == NULL) {
777       ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!", __FUNCTION__,
778             frameNumber, idx);
779       return BAD_VALUE;
780     }
781     if (!priv->alive) {
782       ALOGE("%s: Request %d: Buffer %zu: Dead stream!", __FUNCTION__,
783             frameNumber, idx);
784       return BAD_VALUE;
785     }
786     if (b->status != CAMERA3_BUFFER_STATUS_OK) {
787       ALOGE("%s: Request %d: Buffer %zu: Status not OK!", __FUNCTION__,
788             frameNumber, idx);
789       return BAD_VALUE;
790     }
791     if (b->release_fence != -1) {
792       ALOGE("%s: Request %d: Buffer %zu: Has a release fence!", __FUNCTION__,
793             frameNumber, idx);
794       return BAD_VALUE;
795     }
796     if (b->buffer == NULL) {
797       ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!", __FUNCTION__,
798             frameNumber, idx);
799       return BAD_VALUE;
800     }
801     idx++;
802     b = &(request->output_buffers[idx]);
803   } while (idx < (ssize_t)request->num_output_buffers);
804 
805   // TODO: Validate settings parameters
806 
807   /**
808    * Start processing this request
809    */
810 
811   mStatus = STATUS_ACTIVE;
812 
813   CameraMetadata settings;
814 
815   if (request->settings == NULL) {
816     settings.acquire(mPrevSettings);
817   } else {
818     settings = request->settings;
819   }
820 
821   res = process3A(settings);
822   if (res != OK) {
823     return res;
824   }
825 
826   // TODO: Handle reprocessing
827 
828   /**
829    * Get ready for sensor config
830    */
831 
832   nsecs_t exposureTime;
833   nsecs_t frameDuration;
834   uint32_t sensitivity;
835   bool needJpeg = false;
836   camera_metadata_entry_t entry;
837 
838   entry = settings.find(ANDROID_SENSOR_EXPOSURE_TIME);
839   exposureTime =
840       (entry.count > 0) ? entry.data.i64[0] : Sensor::kExposureTimeRange[0];
841   entry = settings.find(ANDROID_SENSOR_FRAME_DURATION);
842   frameDuration =
843       (entry.count > 0) ? entry.data.i64[0] : Sensor::kFrameDurationRange[0];
844   entry = settings.find(ANDROID_SENSOR_SENSITIVITY);
845   sensitivity =
846       (entry.count > 0) ? entry.data.i32[0] : Sensor::kSensitivityRange[0];
847 
848   if (exposureTime > frameDuration) {
849     frameDuration = exposureTime + Sensor::kMinVerticalBlank;
850     settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
851   }
852 
853   Buffers *sensorBuffers = new Buffers();
854   HalBufferVector *buffers = new HalBufferVector();
855 
856   sensorBuffers->setCapacity(request->num_output_buffers);
857   buffers->setCapacity(request->num_output_buffers);
858 
859   // Process all the buffers we got for output, constructing internal buffer
860   // structures for them, and lock them for writing.
861   for (size_t i = 0; i < request->num_output_buffers; i++) {
862     const camera3_stream_buffer &srcBuf = request->output_buffers[i];
863     StreamBuffer destBuf;
864     destBuf.streamId = kGenericStreamId;
865     destBuf.width = srcBuf.stream->width;
866     destBuf.height = srcBuf.stream->height;
867     // For GCE, IMPLEMENTATION_DEFINED is always RGBx_8888
868     destBuf.format =
869         (srcBuf.stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)
870             ? HAL_PIXEL_FORMAT_RGBA_8888
871             : srcBuf.stream->format;
872     destBuf.stride = srcBuf.stream->width;
873     destBuf.dataSpace = srcBuf.stream->data_space;
874     destBuf.buffer = srcBuf.buffer;
875 
876     if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
877       needJpeg = true;
878     }
879 
880     // Wait on fence
881     sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
882     res = bufferAcquireFence->wait(kFenceTimeoutMs);
883     if (res == TIMED_OUT) {
884       ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
885             __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
886     }
887     if (res == OK) {
888       // Lock buffer for writing
889       if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
890         if (destBuf.format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
891           android_ycbcr ycbcr = android_ycbcr();
892           res = GrallocModule::getInstance().lock_ycbcr(
893               *(destBuf.buffer), GRALLOC_USAGE_HW_CAMERA_WRITE, 0, 0,
894               destBuf.width, destBuf.height, &ycbcr);
895           // This is only valid because we know that emulator's
896           // YCbCr_420_888 is really contiguous NV21 under the hood
897           destBuf.img = static_cast<uint8_t *>(ycbcr.y);
898         } else {
899           ALOGE("Unexpected private format for flexible YUV: 0x%x",
900                 destBuf.format);
901           res = INVALID_OPERATION;
902         }
903       } else {
904         res = GrallocModule::getInstance().lock(
905             *(destBuf.buffer), GRALLOC_USAGE_HW_CAMERA_WRITE, 0, 0,
906             destBuf.width, destBuf.height, (void **)&(destBuf.img));
907       }
908       if (res != OK) {
909         ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer", __FUNCTION__,
910               frameNumber, i);
911       }
912     }
913 
914     if (res != OK) {
915       // Either waiting or locking failed. Unlock locked buffers and bail
916       // out.
917       for (size_t j = 0; j < i; j++) {
918         GrallocModule::getInstance().unlock(
919             *(request->output_buffers[i].buffer));
920       }
921       delete sensorBuffers;
922       delete buffers;
923       return NO_INIT;
924     }
925 
926     sensorBuffers->push_back(destBuf);
927     buffers->push_back(srcBuf);
928   }
929 
930   /**
931    * Wait for JPEG compressor to not be busy, if needed
932    */
933   if (needJpeg) {
934     bool ready = mJpegCompressor->waitForDone(kJpegTimeoutNs);
935     if (!ready) {
936       ALOGE("%s: Timeout waiting for JPEG compression to complete!",
937             __FUNCTION__);
938       return NO_INIT;
939     }
940     res = mJpegCompressor->reserve();
941     if (res != OK) {
942       ALOGE("%s: Error managing JPEG compressor resources, can't reserve it!",
943             __FUNCTION__);
944       return NO_INIT;
945     }
946   }
947 
948   /**
949    * Wait until the in-flight queue has room
950    */
951   res = mReadoutThread->waitForReadout();
952   if (res != OK) {
953     ALOGE("%s: Timeout waiting for previous requests to complete!",
954           __FUNCTION__);
955     return NO_INIT;
956   }
957 
958   /**
959    * Wait until sensor's ready. This waits for lengthy amounts of time with
960    * mLock held, but the interface spec is that no other calls may by done to
961    * the HAL by the framework while process_capture_request is happening.
962    */
963   int syncTimeoutCount = 0;
964   while (!mSensor->waitForVSync(kSyncWaitTimeout)) {
965     if (mStatus == STATUS_ERROR) {
966       return NO_INIT;
967     }
968     if (syncTimeoutCount == kMaxSyncTimeoutCount) {
969       ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
970             __FUNCTION__, frameNumber,
971             kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
972       return NO_INIT;
973     }
974     syncTimeoutCount++;
975   }
976 
977   /**
978    * Configure sensor and queue up the request to the readout thread
979    */
980   mSensor->setExposureTime(exposureTime);
981   mSensor->setFrameDuration(frameDuration);
982   mSensor->setSensitivity(sensitivity);
983   mSensor->setDestinationBuffers(sensorBuffers);
984   mSensor->setFrameNumber(request->frame_number);
985 
986   ReadoutThread::Request r;
987   r.frameNumber = request->frame_number;
988   r.settings = settings;
989   r.sensorBuffers = sensorBuffers;
990   r.buffers = buffers;
991 
992   mReadoutThread->queueCaptureRequest(r);
993   ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);
994 
995   // Cache the settings for next time
996   mPrevSettings.acquire(settings);
997 
998   return OK;
999 }
1000 
// Flushing in-flight requests is intentionally a no-op for the fake camera:
// log a warning and report success so the framework treats the flush as
// having completed.
status_t EmulatedFakeCamera3::flush() {
  ALOGW("%s: Not implemented; ignored", __FUNCTION__);
  return OK;
}
1005 
1006 /** Debug methods */
1007 
dump(int)1008 void EmulatedFakeCamera3::dump(int /*fd*/) {}
1009 
1010 /**
1011  * Private methods
1012  */
1013 
getCameraCapabilities()1014 status_t EmulatedFakeCamera3::getCameraCapabilities() {
1015   const char *key =
1016       mFacingBack ? "qemu.sf.back_camera_caps" : "qemu.sf.front_camera_caps";
1017 
1018   /* Defined by 'qemu.sf.*_camera_caps' boot property: if the
1019    * property doesn't exist, it is assumed to list FULL. */
1020   char prop[PROPERTY_VALUE_MAX];
1021   if (property_get(key, prop, NULL) > 0) {
1022     char *saveptr = nullptr;
1023     char *cap = strtok_r(prop, " ,", &saveptr);
1024     while (cap != NULL) {
1025       for (int i = 0; i < NUM_CAPABILITIES; i++) {
1026         if (!strcasecmp(cap, sAvailableCapabilitiesStrings[i])) {
1027           mCapabilities.add(static_cast<AvailableCapabilities>(i));
1028           break;
1029         }
1030       }
1031       cap = strtok_r(NULL, " ,", &saveptr);
1032     }
1033     if (mCapabilities.size() == 0) {
1034       ALOGE("qemu.sf.back_camera_caps had no valid capabilities: %s", prop);
1035     }
1036   }
1037   // Default to FULL_LEVEL plus RAW if nothing is defined
1038   if (mCapabilities.size() == 0) {
1039     mCapabilities.add(FULL_LEVEL);
1040     mCapabilities.add(RAW);
1041   }
1042 
1043   // Add level-based caps
1044   if (hasCapability(FULL_LEVEL)) {
1045     mCapabilities.add(BURST_CAPTURE);
1046     mCapabilities.add(READ_SENSOR_SETTINGS);
1047     mCapabilities.add(MANUAL_SENSOR);
1048     mCapabilities.add(MANUAL_POST_PROCESSING);
1049   };
1050 
1051   // Backwards-compatible is required for most other caps
1052   // Not required for DEPTH_OUTPUT, though.
1053   if (hasCapability(BURST_CAPTURE) || hasCapability(READ_SENSOR_SETTINGS) ||
1054       hasCapability(RAW) || hasCapability(MANUAL_SENSOR) ||
1055       hasCapability(MANUAL_POST_PROCESSING) ||
1056       hasCapability(PRIVATE_REPROCESSING) || hasCapability(YUV_REPROCESSING) ||
1057       hasCapability(CONSTRAINED_HIGH_SPEED_VIDEO)) {
1058     mCapabilities.add(BACKWARD_COMPATIBLE);
1059   }
1060 
1061   ALOGI("Camera %d capabilities:", mCameraID);
1062   for (size_t i = 0; i < mCapabilities.size(); i++) {
1063     ALOGI("  %s", sAvailableCapabilitiesStrings[mCapabilities[i]]);
1064   }
1065 
1066   return OK;
1067 }
1068 
hasCapability(AvailableCapabilities cap)1069 bool EmulatedFakeCamera3::hasCapability(AvailableCapabilities cap) {
1070   ssize_t idx = mCapabilities.indexOf(cap);
1071   return idx >= 0;
1072 }
1073 
constructStaticInfo(const cvd::CameraDefinition & params)1074 status_t EmulatedFakeCamera3::constructStaticInfo(
1075     const cvd::CameraDefinition &params) {
1076   CameraMetadata info;
1077   Vector<int32_t> availableCharacteristicsKeys;
1078   status_t res;
1079 
1080   int32_t width = 0, height = 0;
1081 
1082   /* TODO(ender): this currently supports only maximum resolution. */
1083   for (size_t index = 0; index < params.resolutions.size(); ++index) {
1084     if (width <= params.resolutions[index].width &&
1085         height <= params.resolutions[index].height) {
1086       width = params.resolutions[index].width;
1087       height = params.resolutions[index].height;
1088     }
1089   }
1090 
1091   if (width < 640 || height < 480) {
1092     width = 640;
1093     height = 480;
1094   }
1095 
1096   mSensorWidth = width;
1097   mSensorHeight = height;
1098 
1099 #define ADD_STATIC_ENTRY(name, varptr, count) \
1100   availableCharacteristicsKeys.add(name);     \
1101   res = info.update(name, varptr, count);     \
1102   if (res != OK) return res
1103 
1104   // android.sensor
1105 
1106   if (hasCapability(MANUAL_SENSOR)) {
1107     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1108                      Sensor::kExposureTimeRange, 2);
1109 
1110     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1111                      &Sensor::kFrameDurationRange[1], 1);
1112 
1113     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
1114                      Sensor::kSensitivityRange,
1115                      sizeof(Sensor::kSensitivityRange) / sizeof(int32_t));
1116 
1117     ADD_STATIC_ENTRY(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
1118                      &Sensor::kSensitivityRange[1], 1);
1119   }
1120 
1121   static const float sensorPhysicalSize[2] = {3.20f, 2.40f};  // mm
1122   ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, sensorPhysicalSize, 2);
1123 
1124   const int32_t pixelArray[] = {mSensorWidth, mSensorHeight};
1125   ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, pixelArray, 2);
1126   const int32_t activeArray[] = {0, 0, mSensorWidth, mSensorHeight};
1127   ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArray, 4);
1128 
1129   static const int32_t orientation = 90;  // Aligned with 'long edge'
1130   ADD_STATIC_ENTRY(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1131 
1132   static const uint8_t timestampSource =
1133       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
1134   ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);
1135 
1136   if (hasCapability(RAW)) {
1137     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1138                      &Sensor::kColorFilterArrangement, 1);
1139 
1140     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1141                      (int32_t *)&Sensor::kMaxRawValue, 1);
1142 
1143     static const int32_t blackLevelPattern[4] = {
1144         (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel,
1145         (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel};
1146     ADD_STATIC_ENTRY(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, blackLevelPattern,
1147                      sizeof(blackLevelPattern) / sizeof(int32_t));
1148   }
1149 
1150   if (hasCapability(BACKWARD_COMPATIBLE)) {
1151     static const int32_t availableTestPatternModes[] = {
1152         ANDROID_SENSOR_TEST_PATTERN_MODE_OFF};
1153     ADD_STATIC_ENTRY(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
1154                      availableTestPatternModes,
1155                      sizeof(availableTestPatternModes) / sizeof(int32_t));
1156   }
1157 
1158   // android.lens
1159 
1160   static const float focalLength = 3.30f;  // mm
1161   ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, &focalLength, 1);
1162 
1163   if (hasCapability(BACKWARD_COMPATIBLE)) {
1164     // 5 cm min focus distance for back camera, infinity (fixed focus) for front
1165     const float minFocusDistance = mFacingBack ? 1.0 / 0.05 : 0.0;
1166     ADD_STATIC_ENTRY(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1167                      &minFocusDistance, 1);
1168 
1169     // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
1170     const float hyperFocalDistance = mFacingBack ? 1.0 / 5.0 : 0.0;
1171     ADD_STATIC_ENTRY(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, &hyperFocalDistance,
1172                      1);
1173 
1174     static const float aperture = 2.8f;
1175     ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_APERTURES, &aperture, 1);
1176     static const float filterDensity = 0;
1177     ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1178                      &filterDensity, 1);
1179     static const uint8_t availableOpticalStabilization =
1180         ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
1181     ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1182                      &availableOpticalStabilization, 1);
1183 
1184     static const int32_t lensShadingMapSize[] = {1, 1};
1185     ADD_STATIC_ENTRY(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
1186                      sizeof(lensShadingMapSize) / sizeof(int32_t));
1187 
1188     static const uint8_t lensFocusCalibration =
1189         ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE;
1190     ADD_STATIC_ENTRY(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
1191                      &lensFocusCalibration, 1);
1192   }
1193 
1194   if (hasCapability(DEPTH_OUTPUT)) {
1195     // These could be included for non-DEPTH capability as well, but making this
1196     // variable for testing coverage
1197 
1198     // 90 degree rotation to align with long edge of a phone device that's by
1199     // default portrait
1200     static const float qO[] = {0.707107f, 0.f, 0.f, 0.707107f};
1201 
1202     // Either a 180-degree rotation for back-facing, or no rotation for
1203     // front-facing
1204     const float qF[] = {0, (mFacingBack ? 1.f : 0.f), 0,
1205                         (mFacingBack ? 0.f : 1.f)};
1206 
1207     // Quarternion product, orientation change then facing
1208     const float lensPoseRotation[] = {
1209         qO[0] * qF[0] - qO[1] * qF[1] - qO[2] * qF[2] - qO[3] * qF[3],
1210         qO[0] * qF[1] + qO[1] * qF[0] + qO[2] * qF[3] - qO[3] * qF[2],
1211         qO[0] * qF[2] + qO[2] * qF[0] + qO[1] * qF[3] - qO[3] * qF[1],
1212         qO[0] * qF[3] + qO[3] * qF[0] + qO[1] * qF[2] - qO[2] * qF[1]};
1213 
1214     ADD_STATIC_ENTRY(ANDROID_LENS_POSE_ROTATION, lensPoseRotation,
1215                      sizeof(lensPoseRotation) / sizeof(float));
1216 
1217     // Only one camera facing each way, so 0 translation needed to the center of
1218     // the 'main' camera
1219     static const float lensPoseTranslation[] = {0.f, 0.f, 0.f};
1220 
1221     ADD_STATIC_ENTRY(ANDROID_LENS_POSE_TRANSLATION, lensPoseTranslation,
1222                      sizeof(lensPoseTranslation) / sizeof(float));
1223 
1224     // Intrinsics are 'ideal' (f_x, f_y, c_x, c_y, s) match focal length and
1225     // active array size
1226     float f_x = focalLength * mSensorWidth / sensorPhysicalSize[0];
1227     float f_y = focalLength * mSensorHeight / sensorPhysicalSize[1];
1228     float c_x = mSensorWidth / 2.f;
1229     float c_y = mSensorHeight / 2.f;
1230     float s = 0.f;
1231     const float lensIntrinsics[] = {f_x, f_y, c_x, c_y, s};
1232 
1233     ADD_STATIC_ENTRY(ANDROID_LENS_INTRINSIC_CALIBRATION, lensIntrinsics,
1234                      sizeof(lensIntrinsics) / sizeof(float));
1235 
1236     // No radial or tangential distortion
1237 
1238     float lensRadialDistortion[] = {1.0f, 0.f, 0.f, 0.f, 0.f, 0.f};
1239 
1240     ADD_STATIC_ENTRY(ANDROID_LENS_RADIAL_DISTORTION, lensRadialDistortion,
1241                      sizeof(lensRadialDistortion) / sizeof(float));
1242   }
1243 
1244   const uint8_t lensFacing =
1245       mFacingBack ? ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1246   ADD_STATIC_ENTRY(ANDROID_LENS_FACING, &lensFacing, 1);
1247 
1248   // android.flash
1249 
1250   const uint8_t flashAvailable = mFacingBack;
1251   ADD_STATIC_ENTRY(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
1252 
1253   // android.tonemap
1254 
1255   if (hasCapability(MANUAL_POST_PROCESSING)) {
1256     static const int32_t tonemapCurvePoints = 128;
1257     ADD_STATIC_ENTRY(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
1258 
1259     static const uint8_t availableToneMapModes[] = {
1260         ANDROID_TONEMAP_MODE_CONTRAST_CURVE, ANDROID_TONEMAP_MODE_FAST,
1261         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
1262     ADD_STATIC_ENTRY(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
1263                      availableToneMapModes, sizeof(availableToneMapModes));
1264   }
1265 
1266   // android.scaler
1267 
1268   const std::vector<int32_t> availableStreamConfigurationsBasic = {
1269       HAL_PIXEL_FORMAT_BLOB,
1270       width,
1271       height,
1272       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1273       HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1274       320,
1275       240,
1276       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1277       HAL_PIXEL_FORMAT_YCbCr_420_888,
1278       320,
1279       240,
1280       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1281       HAL_PIXEL_FORMAT_BLOB,
1282       320,
1283       240,
1284       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1285   };
1286 
1287   // Always need to include 640x480 in basic formats
1288   const std::vector<int32_t> availableStreamConfigurationsBasic640 = {
1289       HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1290       640,
1291       480,
1292       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1293       HAL_PIXEL_FORMAT_YCbCr_420_888,
1294       640,
1295       480,
1296       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1297       HAL_PIXEL_FORMAT_BLOB,
1298       640,
1299       480,
1300       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
1301 
1302   const std::vector<int32_t> availableStreamConfigurationsRaw = {
1303       HAL_PIXEL_FORMAT_RAW16,
1304       width,
1305       height,
1306       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1307   };
1308 
1309   const std::vector<int32_t> availableStreamConfigurationsBurst = {
1310       HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1311       width,
1312       height,
1313       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1314       HAL_PIXEL_FORMAT_YCbCr_420_888,
1315       width,
1316       height,
1317       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1318       HAL_PIXEL_FORMAT_RGBA_8888,
1319       width,
1320       height,
1321       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
1322   };
1323 
1324   std::vector<int32_t> availableStreamConfigurations;
1325 
1326   if (hasCapability(BACKWARD_COMPATIBLE)) {
1327     availableStreamConfigurations.insert(
1328         availableStreamConfigurations.end(),
1329         availableStreamConfigurationsBasic.begin(),
1330         availableStreamConfigurationsBasic.end());
1331     if (width > 640) {
1332       availableStreamConfigurations.insert(
1333           availableStreamConfigurations.end(),
1334           availableStreamConfigurationsBasic640.begin(),
1335           availableStreamConfigurationsBasic640.end());
1336     }
1337   }
1338   if (hasCapability(RAW)) {
1339     availableStreamConfigurations.insert(
1340         availableStreamConfigurations.end(),
1341         availableStreamConfigurationsRaw.begin(),
1342         availableStreamConfigurationsRaw.end());
1343   }
1344   if (hasCapability(BURST_CAPTURE)) {
1345     availableStreamConfigurations.insert(
1346         availableStreamConfigurations.end(),
1347         availableStreamConfigurationsBurst.begin(),
1348         availableStreamConfigurationsBurst.end());
1349   }
1350 
1351   if (availableStreamConfigurations.size() > 0) {
1352     ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1353                      &availableStreamConfigurations[0],
1354                      availableStreamConfigurations.size());
1355   }
1356 
1357   const std::vector<int64_t> availableMinFrameDurationsBasic = {
1358       HAL_PIXEL_FORMAT_BLOB,
1359       width,
1360       height,
1361       Sensor::kFrameDurationRange[0],
1362       HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1363       320,
1364       240,
1365       Sensor::kFrameDurationRange[0],
1366       HAL_PIXEL_FORMAT_YCbCr_420_888,
1367       320,
1368       240,
1369       Sensor::kFrameDurationRange[0],
1370       HAL_PIXEL_FORMAT_BLOB,
1371       320,
1372       240,
1373       Sensor::kFrameDurationRange[0],
1374   };
1375 
1376   // Always need to include 640x480 in basic formats
1377   const std::vector<int64_t> availableMinFrameDurationsBasic640 = {
1378       HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1379       640,
1380       480,
1381       Sensor::kFrameDurationRange[0],
1382       HAL_PIXEL_FORMAT_YCbCr_420_888,
1383       640,
1384       480,
1385       Sensor::kFrameDurationRange[0],
1386       HAL_PIXEL_FORMAT_BLOB,
1387       640,
1388       480,
1389       Sensor::kFrameDurationRange[0]};
1390 
1391   const std::vector<int64_t> availableMinFrameDurationsRaw = {
1392       HAL_PIXEL_FORMAT_RAW16,
1393       width,
1394       height,
1395       Sensor::kFrameDurationRange[0],
1396   };
1397 
1398   const std::vector<int64_t> availableMinFrameDurationsBurst = {
1399       HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1400       width,
1401       height,
1402       Sensor::kFrameDurationRange[0],
1403       HAL_PIXEL_FORMAT_YCbCr_420_888,
1404       width,
1405       height,
1406       Sensor::kFrameDurationRange[0],
1407       HAL_PIXEL_FORMAT_RGBA_8888,
1408       width,
1409       height,
1410       Sensor::kFrameDurationRange[0],
1411   };
1412 
1413   std::vector<int64_t> availableMinFrameDurations;
1414 
1415   if (hasCapability(BACKWARD_COMPATIBLE)) {
1416     availableMinFrameDurations.insert(availableMinFrameDurations.end(),
1417                                       availableMinFrameDurationsBasic.begin(),
1418                                       availableMinFrameDurationsBasic.end());
1419     if (width > 640) {
1420       availableMinFrameDurations.insert(
1421           availableMinFrameDurations.end(),
1422           availableMinFrameDurationsBasic640.begin(),
1423           availableMinFrameDurationsBasic640.end());
1424     }
1425   }
1426   if (hasCapability(RAW)) {
1427     availableMinFrameDurations.insert(availableMinFrameDurations.end(),
1428                                       availableMinFrameDurationsRaw.begin(),
1429                                       availableMinFrameDurationsRaw.end());
1430   }
1431   if (hasCapability(BURST_CAPTURE)) {
1432     availableMinFrameDurations.insert(availableMinFrameDurations.end(),
1433                                       availableMinFrameDurationsBurst.begin(),
1434                                       availableMinFrameDurationsBurst.end());
1435   }
1436 
1437   if (availableMinFrameDurations.size() > 0) {
1438     ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
1439                      &availableMinFrameDurations[0],
1440                      availableMinFrameDurations.size());
1441   }
1442 
1443   const std::vector<int64_t> availableStallDurationsBasic = {
1444       HAL_PIXEL_FORMAT_BLOB,
1445       width,
1446       height,
1447       Sensor::kFrameDurationRange[0],
1448       HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1449       320,
1450       240,
1451       0,
1452       HAL_PIXEL_FORMAT_YCbCr_420_888,
1453       320,
1454       240,
1455       0,
1456       HAL_PIXEL_FORMAT_RGBA_8888,
1457       320,
1458       240,
1459       0,
1460   };
1461 
1462   // Always need to include 640x480 in basic formats
1463   const std::vector<int64_t> availableStallDurationsBasic640 = {
1464       HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1465       640,
1466       480,
1467       0,
1468       HAL_PIXEL_FORMAT_YCbCr_420_888,
1469       640,
1470       480,
1471       0,
1472       HAL_PIXEL_FORMAT_BLOB,
1473       640,
1474       480,
1475       Sensor::kFrameDurationRange[0]};
1476 
1477   const std::vector<int64_t> availableStallDurationsRaw = {
1478       HAL_PIXEL_FORMAT_RAW16, width, height, Sensor::kFrameDurationRange[0]};
1479   const std::vector<int64_t> availableStallDurationsBurst = {
1480       HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1481       width,
1482       height,
1483       0,
1484       HAL_PIXEL_FORMAT_YCbCr_420_888,
1485       width,
1486       height,
1487       0,
1488       HAL_PIXEL_FORMAT_RGBA_8888,
1489       width,
1490       height,
1491       0};
1492 
1493   std::vector<int64_t> availableStallDurations;
1494 
1495   if (hasCapability(BACKWARD_COMPATIBLE)) {
1496     availableStallDurations.insert(availableStallDurations.end(),
1497                                    availableStallDurationsBasic.begin(),
1498                                    availableStallDurationsBasic.end());
1499     if (width > 640) {
1500       availableStallDurations.insert(availableStallDurations.end(),
1501                                      availableStallDurationsBasic640.begin(),
1502                                      availableStallDurationsBasic640.end());
1503     }
1504   }
1505   if (hasCapability(RAW)) {
1506     availableStallDurations.insert(availableStallDurations.end(),
1507                                    availableStallDurationsRaw.begin(),
1508                                    availableStallDurationsRaw.end());
1509   }
1510   if (hasCapability(BURST_CAPTURE)) {
1511     availableStallDurations.insert(availableStallDurations.end(),
1512                                    availableStallDurationsBurst.begin(),
1513                                    availableStallDurationsBurst.end());
1514   }
1515 
1516   if (availableStallDurations.size() > 0) {
1517     ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
1518                      &availableStallDurations[0],
1519                      availableStallDurations.size());
1520   }
1521 
1522   if (hasCapability(BACKWARD_COMPATIBLE)) {
1523     static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
1524     ADD_STATIC_ENTRY(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
1525 
1526     static const float maxZoom = 10;
1527     ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, &maxZoom, 1);
1528   }
1529 
1530   // android.jpeg
1531 
1532   if (hasCapability(BACKWARD_COMPATIBLE)) {
1533     static const int32_t jpegThumbnailSizes[] = {0, 0, 160, 120, 320, 240};
1534     ADD_STATIC_ENTRY(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegThumbnailSizes,
1535                      sizeof(jpegThumbnailSizes) / sizeof(int32_t));
1536 
1537     static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
1538     ADD_STATIC_ENTRY(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
1539   }
1540 
1541   // android.stats
1542 
1543   if (hasCapability(BACKWARD_COMPATIBLE)) {
1544     static const uint8_t availableFaceDetectModes[] = {
1545         ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
1546         ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
1547         ANDROID_STATISTICS_FACE_DETECT_MODE_FULL};
1548     ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1549                      availableFaceDetectModes,
1550                      sizeof(availableFaceDetectModes));
1551 
1552     static const int32_t maxFaceCount = 8;
1553     ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, &maxFaceCount, 1);
1554 
1555     static const uint8_t availableShadingMapModes[] = {
1556         ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF};
1557     ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
1558                      availableShadingMapModes,
1559                      sizeof(availableShadingMapModes));
1560   }
1561 
1562   // android.sync
1563 
1564   static const int32_t maxLatency =
1565       hasCapability(FULL_LEVEL) ? ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL
1566                                 : 3;
1567   ADD_STATIC_ENTRY(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);
1568 
1569   // android.control
1570 
1571   if (hasCapability(BACKWARD_COMPATIBLE)) {
1572     static const uint8_t availableControlModes[] = {
1573         ANDROID_CONTROL_MODE_OFF, ANDROID_CONTROL_MODE_AUTO,
1574         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
1575     ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES, availableControlModes,
1576                      sizeof(availableControlModes));
1577   } else {
1578     static const uint8_t availableControlModes[] = {ANDROID_CONTROL_MODE_AUTO};
1579     ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES, availableControlModes,
1580                      sizeof(availableControlModes));
1581   }
1582 
1583   static const uint8_t availableSceneModes[] = {
1584     static_cast<uint8_t>(hasCapability(BACKWARD_COMPATIBLE)
1585                          ? ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY
1586                          : ANDROID_CONTROL_SCENE_MODE_DISABLED)};
1587   ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, availableSceneModes,
1588                    sizeof(availableSceneModes));
1589 
1590   if (hasCapability(BACKWARD_COMPATIBLE)) {
1591     static const uint8_t availableEffects[] = {ANDROID_CONTROL_EFFECT_MODE_OFF};
1592     ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_EFFECTS, availableEffects,
1593                      sizeof(availableEffects));
1594   }
1595 
1596   if (hasCapability(BACKWARD_COMPATIBLE)) {
1597     static const int32_t max3aRegions[] = {/*AE*/ 1, /*AWB*/ 0, /*AF*/ 1};
1598     ADD_STATIC_ENTRY(ANDROID_CONTROL_MAX_REGIONS, max3aRegions,
1599                      sizeof(max3aRegions) / sizeof(max3aRegions[0]));
1600 
1601     static const uint8_t availableAeModes[] = {ANDROID_CONTROL_AE_MODE_OFF,
1602                                                ANDROID_CONTROL_AE_MODE_ON};
1603     ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_MODES, availableAeModes,
1604                      sizeof(availableAeModes));
1605 
1606     static const camera_metadata_rational exposureCompensationStep = {1, 3};
1607     ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1608                      &exposureCompensationStep, 1);
1609 
1610     int32_t exposureCompensationRange[] = {-9, 9};
1611     ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1612                      exposureCompensationRange,
1613                      sizeof(exposureCompensationRange) / sizeof(int32_t));
1614   }
1615 
1616   static const int32_t availableTargetFpsRanges[] = {5,  30, 15, 30,
1617                                                      15, 15, 30, 30};
1618   ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1619                    availableTargetFpsRanges,
1620                    sizeof(availableTargetFpsRanges) / sizeof(int32_t));
1621 
1622   if (hasCapability(BACKWARD_COMPATIBLE)) {
1623     static const uint8_t availableAntibandingModes[] = {
1624         ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
1625         ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO};
1626     ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1627                      availableAntibandingModes,
1628                      sizeof(availableAntibandingModes));
1629   }
1630 
1631   static const uint8_t aeLockAvailable =
1632       hasCapability(BACKWARD_COMPATIBLE)
1633           ? ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE
1634           : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
1635 
1636   ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_LOCK_AVAILABLE, &aeLockAvailable, 1);
1637 
1638   if (hasCapability(BACKWARD_COMPATIBLE)) {
1639     static const uint8_t availableAwbModes[] = {
1640         ANDROID_CONTROL_AWB_MODE_OFF,
1641         ANDROID_CONTROL_AWB_MODE_AUTO,
1642         ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
1643         ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
1644         ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
1645         ANDROID_CONTROL_AWB_MODE_SHADE};
1646     ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_AVAILABLE_MODES, availableAwbModes,
1647                      sizeof(availableAwbModes));
1648   }
1649 
1650   static const uint8_t awbLockAvailable =
1651       hasCapability(BACKWARD_COMPATIBLE)
1652           ? ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE
1653           : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
1654 
1655   ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, &awbLockAvailable, 1);
1656 
1657   static const uint8_t availableAfModesBack[] = {
1658       ANDROID_CONTROL_AF_MODE_OFF, ANDROID_CONTROL_AF_MODE_AUTO,
1659       ANDROID_CONTROL_AF_MODE_MACRO, ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
1660       ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE};
1661 
1662   static const uint8_t availableAfModesFront[] = {ANDROID_CONTROL_AF_MODE_OFF};
1663 
1664   if (mFacingBack && hasCapability(BACKWARD_COMPATIBLE)) {
1665     ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES, availableAfModesBack,
1666                      sizeof(availableAfModesBack));
1667   } else {
1668     ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES, availableAfModesFront,
1669                      sizeof(availableAfModesFront));
1670   }
1671 
1672   static const uint8_t availableVstabModes[] = {
1673       ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
1674   ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1675                    availableVstabModes, sizeof(availableVstabModes));
1676 
1677   // android.colorCorrection
1678 
1679   if (hasCapability(BACKWARD_COMPATIBLE)) {
1680     static const uint8_t availableAberrationModes[] = {
1681         ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
1682         ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
1683         ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
1684     ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
1685                      availableAberrationModes,
1686                      sizeof(availableAberrationModes));
1687   } else {
1688     static const uint8_t availableAberrationModes[] = {
1689         ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
1690     };
1691     ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
1692                      availableAberrationModes,
1693                      sizeof(availableAberrationModes));
1694   }
1695   // android.edge
1696 
1697   if (hasCapability(BACKWARD_COMPATIBLE)) {
1698     static const uint8_t availableEdgeModes[] = {
1699         ANDROID_EDGE_MODE_OFF, ANDROID_EDGE_MODE_FAST,
1700         ANDROID_EDGE_MODE_HIGH_QUALITY};
1701     ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES, availableEdgeModes,
1702                      sizeof(availableEdgeModes));
1703   } else {
1704     static const uint8_t availableEdgeModes[] = {ANDROID_EDGE_MODE_OFF};
1705     ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES, availableEdgeModes,
1706                      sizeof(availableEdgeModes));
1707   }
1708 
1709   // android.info
1710 
1711   static const uint8_t supportedHardwareLevel =
1712       hasCapability(FULL_LEVEL) ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL
1713                                 : ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
1714   ADD_STATIC_ENTRY(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
1715                    &supportedHardwareLevel,
1716                    /*count*/ 1);
1717 
1718   // android.noiseReduction
1719 
1720   if (hasCapability(BACKWARD_COMPATIBLE)) {
1721     static const uint8_t availableNoiseReductionModes[] = {
1722         ANDROID_NOISE_REDUCTION_MODE_OFF, ANDROID_NOISE_REDUCTION_MODE_FAST,
1723         ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY};
1724     ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
1725                      availableNoiseReductionModes,
1726                      sizeof(availableNoiseReductionModes));
1727   } else {
1728     static const uint8_t availableNoiseReductionModes[] = {
1729         ANDROID_NOISE_REDUCTION_MODE_OFF,
1730     };
1731     ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
1732                      availableNoiseReductionModes,
1733                      sizeof(availableNoiseReductionModes));
1734   }
1735 
1736   // android.depth
1737 
1738   if (hasCapability(DEPTH_OUTPUT)) {
1739     static const int32_t maxDepthSamples = 100;
1740     ADD_STATIC_ENTRY(ANDROID_DEPTH_MAX_DEPTH_SAMPLES, &maxDepthSamples, 1);
1741 
1742     static const int32_t availableDepthStreamConfigurations[] = {
1743         HAL_PIXEL_FORMAT_Y16,
1744         160,
1745         120,
1746         ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
1747         HAL_PIXEL_FORMAT_BLOB,
1748         maxDepthSamples,
1749         1,
1750         ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT};
1751     ADD_STATIC_ENTRY(
1752         ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
1753         availableDepthStreamConfigurations,
1754         sizeof(availableDepthStreamConfigurations) / sizeof(int32_t));
1755 
1756     static const int64_t availableDepthMinFrameDurations[] = {
1757         HAL_PIXEL_FORMAT_Y16,
1758         160,
1759         120,
1760         Sensor::kFrameDurationRange[0],
1761         HAL_PIXEL_FORMAT_BLOB,
1762         maxDepthSamples,
1763         1,
1764         Sensor::kFrameDurationRange[0]};
1765     ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
1766                      availableDepthMinFrameDurations,
1767                      sizeof(availableDepthMinFrameDurations) / sizeof(int64_t));
1768 
1769     static const int64_t availableDepthStallDurations[] = {
1770         HAL_PIXEL_FORMAT_Y16,
1771         160,
1772         120,
1773         Sensor::kFrameDurationRange[0],
1774         HAL_PIXEL_FORMAT_BLOB,
1775         maxDepthSamples,
1776         1,
1777         Sensor::kFrameDurationRange[0]};
1778     ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
1779                      availableDepthStallDurations,
1780                      sizeof(availableDepthStallDurations) / sizeof(int64_t));
1781 
1782     uint8_t depthIsExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
1783     ADD_STATIC_ENTRY(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthIsExclusive, 1);
1784   }
1785 
1786   // android.shading
1787 
1788   if (hasCapability(BACKWARD_COMPATIBLE)) {
1789     static const uint8_t availableShadingModes[] = {
1790         ANDROID_SHADING_MODE_OFF, ANDROID_SHADING_MODE_FAST,
1791         ANDROID_SHADING_MODE_HIGH_QUALITY};
1792     ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
1793                      sizeof(availableShadingModes));
1794   } else {
1795     static const uint8_t availableShadingModes[] = {ANDROID_SHADING_MODE_OFF};
1796     ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
1797                      sizeof(availableShadingModes));
1798   }
1799 
1800   // android.request
1801 
1802   static const int32_t maxNumOutputStreams[] = {
1803       kMaxRawStreamCount, kMaxProcessedStreamCount, kMaxJpegStreamCount};
1804   ADD_STATIC_ENTRY(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, maxNumOutputStreams,
1805                    3);
1806 
1807   static const uint8_t maxPipelineDepth = kMaxBufferCount;
1808   ADD_STATIC_ENTRY(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &maxPipelineDepth, 1);
1809 
1810   static const int32_t partialResultCount = 1;
1811   ADD_STATIC_ENTRY(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &partialResultCount,
1812                    /*count*/ 1);
1813 
1814   SortedVector<uint8_t> caps;
1815   for (size_t i = 0; i < mCapabilities.size(); i++) {
1816     switch (mCapabilities[i]) {
1817       case BACKWARD_COMPATIBLE:
1818         caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
1819         break;
1820       case MANUAL_SENSOR:
1821         caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
1822         break;
1823       case MANUAL_POST_PROCESSING:
1824         caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
1825         break;
1826       case RAW:
1827         caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
1828         break;
1829       case PRIVATE_REPROCESSING:
1830         caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
1831         break;
1832       case READ_SENSOR_SETTINGS:
1833         caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
1834         break;
1835       case BURST_CAPTURE:
1836         caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
1837         break;
1838       case YUV_REPROCESSING:
1839         caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
1840         break;
1841       case DEPTH_OUTPUT:
1842         caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
1843         break;
1844       case CONSTRAINED_HIGH_SPEED_VIDEO:
1845         caps.add(
1846             ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
1847         break;
1848       default:
1849         // Ignore LEVELs
1850         break;
1851     }
1852   }
1853   ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, caps.array(),
1854                    caps.size());
1855 
1856   // Scan a default request template for included request keys
1857   Vector<int32_t> availableRequestKeys;
1858   const camera_metadata_t *previewRequest =
1859       constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
1860   for (size_t i = 0; i < get_camera_metadata_entry_count(previewRequest); i++) {
1861     camera_metadata_ro_entry_t entry;
1862     get_camera_metadata_ro_entry(previewRequest, i, &entry);
1863     availableRequestKeys.add(entry.tag);
1864   }
1865   ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
1866                    availableRequestKeys.array(), availableRequestKeys.size());
1867 
1868   // Add a few more result keys. Must be kept up to date with the various places
1869   // that add these
1870 
1871   Vector<int32_t> availableResultKeys(availableRequestKeys);
1872   if (hasCapability(BACKWARD_COMPATIBLE)) {
1873     availableResultKeys.add(ANDROID_CONTROL_AE_STATE);
1874     availableResultKeys.add(ANDROID_CONTROL_AF_STATE);
1875     availableResultKeys.add(ANDROID_CONTROL_AWB_STATE);
1876     availableResultKeys.add(ANDROID_FLASH_STATE);
1877     availableResultKeys.add(ANDROID_LENS_STATE);
1878     availableResultKeys.add(ANDROID_LENS_FOCUS_RANGE);
1879     availableResultKeys.add(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
1880     availableResultKeys.add(ANDROID_STATISTICS_SCENE_FLICKER);
1881   }
1882 
1883   if (hasCapability(DEPTH_OUTPUT)) {
1884     availableResultKeys.add(ANDROID_LENS_POSE_ROTATION);
1885     availableResultKeys.add(ANDROID_LENS_POSE_TRANSLATION);
1886     availableResultKeys.add(ANDROID_LENS_INTRINSIC_CALIBRATION);
1887     availableResultKeys.add(ANDROID_LENS_RADIAL_DISTORTION);
1888   }
1889 
1890   availableResultKeys.add(ANDROID_REQUEST_PIPELINE_DEPTH);
1891   availableResultKeys.add(ANDROID_SENSOR_TIMESTAMP);
1892 
1893   ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
1894                    availableResultKeys.array(), availableResultKeys.size());
1895 
1896   // Needs to be last, to collect all the keys set
1897 
1898   availableCharacteristicsKeys.add(
1899       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
1900   info.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
1901               availableCharacteristicsKeys);
1902 
1903   mCameraInfo = info.release();
1904 
1905 #undef ADD_STATIC_ENTRY
1906   return OK;
1907 }
1908 
process3A(CameraMetadata & settings)1909 status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) {
1910   /**
1911    * Extract top-level 3A controls
1912    */
1913   status_t res;
1914 
1915   camera_metadata_entry e;
1916 
1917   e = settings.find(ANDROID_CONTROL_MODE);
1918   if (e.count == 0) {
1919     ALOGE("%s: No control mode entry!", __FUNCTION__);
1920     return BAD_VALUE;
1921   }
1922   uint8_t controlMode = e.data.u8[0];
1923 
1924   if (controlMode == ANDROID_CONTROL_MODE_OFF) {
1925     mAeMode = ANDROID_CONTROL_AE_MODE_OFF;
1926     mAfMode = ANDROID_CONTROL_AF_MODE_OFF;
1927     mAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
1928     mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
1929     mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
1930     mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
1931     update3A(settings);
1932     return OK;
1933   } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
1934     if (!hasCapability(BACKWARD_COMPATIBLE)) {
1935       ALOGE("%s: Can't use scene mode when BACKWARD_COMPATIBLE not supported!",
1936             __FUNCTION__);
1937       return BAD_VALUE;
1938     }
1939 
1940     e = settings.find(ANDROID_CONTROL_SCENE_MODE);
1941     if (e.count == 0) {
1942       ALOGE("%s: No scene mode entry!", __FUNCTION__);
1943       return BAD_VALUE;
1944     }
1945     uint8_t sceneMode = e.data.u8[0];
1946 
1947     switch (sceneMode) {
1948       case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
1949         mFacePriority = true;
1950         break;
1951       default:
1952         ALOGE("%s: Emulator doesn't support scene mode %d", __FUNCTION__,
1953               sceneMode);
1954         return BAD_VALUE;
1955     }
1956   } else {
1957     mFacePriority = false;
1958   }
1959 
1960   // controlMode == AUTO or sceneMode = FACE_PRIORITY
1961   // Process individual 3A controls
1962 
1963   res = doFakeAE(settings);
1964   if (res != OK) return res;
1965 
1966   res = doFakeAF(settings);
1967   if (res != OK) return res;
1968 
1969   res = doFakeAWB(settings);
1970   if (res != OK) return res;
1971 
1972   update3A(settings);
1973   return OK;
1974 }
1975 
doFakeAE(CameraMetadata & settings)1976 status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) {
1977   camera_metadata_entry e;
1978 
1979   e = settings.find(ANDROID_CONTROL_AE_MODE);
1980   if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
1981     ALOGE("%s: No AE mode entry!", __FUNCTION__);
1982     return BAD_VALUE;
1983   }
1984   uint8_t aeMode =
1985       (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AE_MODE_ON;
1986   mAeMode = aeMode;
1987 
1988   switch (aeMode) {
1989     case ANDROID_CONTROL_AE_MODE_OFF:
1990       // AE is OFF
1991       mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
1992       return OK;
1993     case ANDROID_CONTROL_AE_MODE_ON:
1994       // OK for AUTO modes
1995       break;
1996     default:
1997       // Mostly silently ignore unsupported modes
1998       ALOGV("%s: Emulator doesn't support AE mode %d, assuming ON",
1999             __FUNCTION__, aeMode);
2000       break;
2001   }
2002 
2003   e = settings.find(ANDROID_CONTROL_AE_LOCK);
2004   bool aeLocked =
2005       (e.count > 0) ? (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON) : false;
2006 
2007   e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
2008   bool precaptureTrigger = false;
2009   if (e.count != 0) {
2010     precaptureTrigger =
2011         (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
2012   }
2013 
2014   if (precaptureTrigger) {
2015     ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
2016   } else if (e.count > 0) {
2017     ALOGV("%s: Pre capture trigger was present? %zu", __FUNCTION__, e.count);
2018   }
2019 
2020   if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2021     // Run precapture sequence
2022     if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2023       mAeCounter = 0;
2024     }
2025 
2026     if (mFacePriority) {
2027       mAeTargetExposureTime = kFacePriorityExposureTime;
2028     } else {
2029       mAeTargetExposureTime = kNormalExposureTime;
2030     }
2031 
2032     if (mAeCounter > kPrecaptureMinFrames &&
2033         (mAeTargetExposureTime - mAeCurrentExposureTime) <
2034             mAeTargetExposureTime / 10) {
2035       // Done with precapture
2036       mAeCounter = 0;
2037       mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED
2038                           : ANDROID_CONTROL_AE_STATE_CONVERGED;
2039     } else {
2040       // Converge some more
2041       mAeCurrentExposureTime +=
2042           (mAeTargetExposureTime - mAeCurrentExposureTime) * kExposureTrackRate;
2043       mAeCounter++;
2044       mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
2045     }
2046 
2047   } else if (!aeLocked) {
2048     // Run standard occasional AE scan
2049     switch (mAeState) {
2050       case ANDROID_CONTROL_AE_STATE_CONVERGED:
2051       case ANDROID_CONTROL_AE_STATE_INACTIVE:
2052         mAeCounter++;
2053         if (mAeCounter > kStableAeMaxFrames) {
2054           mAeTargetExposureTime =
2055               mFacePriority ? kFacePriorityExposureTime : kNormalExposureTime;
2056           float exposureStep = ((double)rand() / RAND_MAX) *
2057                                    (kExposureWanderMax - kExposureWanderMin) +
2058                                kExposureWanderMin;
2059           mAeTargetExposureTime *= std::pow(2, exposureStep);
2060           mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
2061         }
2062         break;
2063       case ANDROID_CONTROL_AE_STATE_SEARCHING:
2064         mAeCurrentExposureTime +=
2065             (mAeTargetExposureTime - mAeCurrentExposureTime) *
2066             kExposureTrackRate;
2067         if (llabs(mAeTargetExposureTime - mAeCurrentExposureTime) <
2068             mAeTargetExposureTime / 10) {
2069           // Close enough
2070           mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2071           mAeCounter = 0;
2072         }
2073         break;
2074       case ANDROID_CONTROL_AE_STATE_LOCKED:
2075         mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2076         mAeCounter = 0;
2077         break;
2078       default:
2079         ALOGE("%s: Emulator in unexpected AE state %d", __FUNCTION__, mAeState);
2080         return INVALID_OPERATION;
2081     }
2082   } else {
2083     // AE is locked
2084     mAeState = ANDROID_CONTROL_AE_STATE_LOCKED;
2085   }
2086 
2087   return OK;
2088 }
2089 
// Simulated autofocus state machine: reads AF mode and trigger from the
// request settings and advances mAfState by at most one transition per
// frame. Focus "success" is randomized; see state cases below.
status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
  camera_metadata_entry e;

  // AF mode is mandatory metadata on BACKWARD_COMPATIBLE devices;
  // otherwise default to OFF when the tag is absent.
  e = settings.find(ANDROID_CONTROL_AF_MODE);
  if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
    ALOGE("%s: No AF mode entry!", __FUNCTION__);
    return BAD_VALUE;
  }
  uint8_t afMode =
      (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AF_MODE_OFF;

  // Per-request AF trigger; treated as IDLE when the tag is absent.
  e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
  typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
  af_trigger_t afTrigger;
  if (e.count != 0) {
    afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);

    ALOGV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
    ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
  } else {
    afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
  }

  switch (afMode) {
    case ANDROID_CONTROL_AF_MODE_OFF:
      // AF disabled: nothing to simulate.
      mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
      return OK;
    case ANDROID_CONTROL_AF_MODE_AUTO:
    case ANDROID_CONTROL_AF_MODE_MACRO:
    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
      // Only the back-facing fake camera advertises focusing AF modes
      // (see the AF available-modes static metadata above).
      if (!mFacingBack) {
        ALOGE("%s: Front camera doesn't support AF mode %d", __FUNCTION__,
              afMode);
        return BAD_VALUE;
      }
      // OK, handle transitions lower on
      break;
    default:
      ALOGE("%s: Emulator doesn't support AF mode %d", __FUNCTION__, afMode);
      return BAD_VALUE;
  }

  // A mode change resets nothing directly but gates the INACTIVE ->
  // PASSIVE_SCAN transition below (one frame stays INACTIVE).
  bool afModeChanged = mAfMode != afMode;
  mAfMode = afMode;

  /**
   * Simulate AF triggers. Transition at most 1 state per frame.
   * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
   */

  bool afTriggerStart = false;
  bool afTriggerCancel = false;
  switch (afTrigger) {
    case ANDROID_CONTROL_AF_TRIGGER_IDLE:
      break;
    case ANDROID_CONTROL_AF_TRIGGER_START:
      afTriggerStart = true;
      break;
    case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
      afTriggerCancel = true;
      // Cancel trigger always transitions into INACTIVE
      mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;

      ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);

      // Stay in 'inactive' until at least next frame
      return OK;
    default:
      ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
      return BAD_VALUE;
  }

  // If we get down here, we're either in an autofocus mode
  //  or in a continuous focus mode (and no other modes)

  // Remember the previous state so the transition can be logged below.
  int oldAfState = mAfState;
  switch (mAfState) {
    case ANDROID_CONTROL_AF_STATE_INACTIVE:
      if (afTriggerStart) {
        switch (afMode) {
          case ANDROID_CONTROL_AF_MODE_AUTO:
            // fall-through
          case ANDROID_CONTROL_AF_MODE_MACRO:
            mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
            break;
          case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            // fall-through
          case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            // Continuous modes report lock state directly on trigger.
            mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
            break;
        }
      } else {
        // At least one frame stays in INACTIVE
        if (!afModeChanged) {
          switch (afMode) {
            case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
              // fall-through
            case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
              mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
              break;
          }
        }
      }
      break;
    case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
      /**
       * When the AF trigger is activated, the algorithm should finish
       * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
       * or AF_NOT_FOCUSED as appropriate
       */
      if (afTriggerStart) {
        // Randomly transition to focused or not focused (2/3 focused)
        if (rand() % 3) {
          mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
        } else {
          mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
        }
      }
      /**
       * When the AF trigger is not involved, the AF algorithm should
       * start in INACTIVE state, and then transition into PASSIVE_SCAN
       * and PASSIVE_FOCUSED states
       */
      else if (!afTriggerCancel) {
        // Randomly transition to passive focus (1/3 chance per frame)
        if (rand() % 3 == 0) {
          mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
        }
      }

      break;
    case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
      if (afTriggerStart) {
        // Randomly transition to focused or not focused (2/3 focused)
        if (rand() % 3) {
          mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
        } else {
          mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
        }
      }
      // TODO: initiate passive scan (PASSIVE_SCAN)
      break;
    case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
      // Simulate AF sweep completing instantaneously

      // Randomly transition to focused or not focused (2/3 focused)
      if (rand() % 3) {
        mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
      } else {
        mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
      }
      break;
    case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
      if (afTriggerStart) {
        switch (afMode) {
          case ANDROID_CONTROL_AF_MODE_AUTO:
            // fall-through
          case ANDROID_CONTROL_AF_MODE_MACRO:
            // A new trigger in auto/macro mode starts a fresh sweep.
            mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
            break;
          case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            // fall-through
          case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            // continuous autofocus => trigger start has no effect
            break;
        }
      }
      break;
    case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
      if (afTriggerStart) {
        switch (afMode) {
          case ANDROID_CONTROL_AF_MODE_AUTO:
            // fall-through
          case ANDROID_CONTROL_AF_MODE_MACRO:
            // A new trigger in auto/macro mode starts a fresh sweep.
            mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
            break;
          case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            // fall-through
          case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            // continuous autofocus => trigger start has no effect
            break;
        }
      }
      break;
    default:
      ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
  }

  // Verbose-log the state transition (old -> new) for debugging.
  {
    char afStateString[100] = {
        0,
    };
    camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE, oldAfState,
                                 afStateString, sizeof(afStateString));

    char afNewStateString[100] = {
        0,
    };
    camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE, mAfState,
                                 afNewStateString, sizeof(afNewStateString));
    ALOGVV("%s: AF state transitioned from %s to %s", __FUNCTION__,
           afStateString, afNewStateString);
  }

  return OK;
}
2297 
doFakeAWB(CameraMetadata & settings)2298 status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) {
2299   camera_metadata_entry e;
2300 
2301   e = settings.find(ANDROID_CONTROL_AWB_MODE);
2302   if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
2303     ALOGE("%s: No AWB mode entry!", __FUNCTION__);
2304     return BAD_VALUE;
2305   }
2306   uint8_t awbMode =
2307       (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AWB_MODE_AUTO;
2308 
2309   // TODO: Add white balance simulation
2310 
2311   e = settings.find(ANDROID_CONTROL_AWB_LOCK);
2312   bool awbLocked =
2313       (e.count > 0) ? (e.data.u8[0] == ANDROID_CONTROL_AWB_LOCK_ON) : false;
2314 
2315   switch (awbMode) {
2316     case ANDROID_CONTROL_AWB_MODE_OFF:
2317       mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2318       break;
2319     case ANDROID_CONTROL_AWB_MODE_AUTO:
2320     case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
2321     case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
2322     case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
2323     case ANDROID_CONTROL_AWB_MODE_SHADE:
2324       // Always magically right, or locked
2325       mAwbState = awbLocked ? ANDROID_CONTROL_AWB_STATE_LOCKED
2326                             : ANDROID_CONTROL_AWB_STATE_CONVERGED;
2327       break;
2328     default:
2329       ALOGE("%s: Emulator doesn't support AWB mode %d", __FUNCTION__, awbMode);
2330       return BAD_VALUE;
2331   }
2332 
2333   return OK;
2334 }
2335 
update3A(CameraMetadata & settings)2336 void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
2337   if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
2338     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &mAeCurrentExposureTime, 1);
2339     settings.update(ANDROID_SENSOR_SENSITIVITY, &mAeCurrentSensitivity, 1);
2340   }
2341 
2342   settings.update(ANDROID_CONTROL_AE_STATE, &mAeState, 1);
2343   settings.update(ANDROID_CONTROL_AF_STATE, &mAfState, 1);
2344   settings.update(ANDROID_CONTROL_AWB_STATE, &mAwbState, 1);
2345 
2346   uint8_t lensState;
2347   switch (mAfState) {
2348     case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
2349     case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
2350       lensState = ANDROID_LENS_STATE_MOVING;
2351       break;
2352     case ANDROID_CONTROL_AF_STATE_INACTIVE:
2353     case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
2354     case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
2355     case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
2356     case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
2357     default:
2358       lensState = ANDROID_LENS_STATE_STATIONARY;
2359       break;
2360   }
2361   settings.update(ANDROID_LENS_STATE, &lensState, 1);
2362 }
2363 
signalReadoutIdle()2364 void EmulatedFakeCamera3::signalReadoutIdle() {
2365   Mutex::Autolock l(mLock);
2366   // Need to chek isIdle again because waiting on mLock may have allowed
2367   // something to be placed in the in-flight queue.
2368   if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
2369     ALOGV("Now idle");
2370     mStatus = STATUS_READY;
2371   }
2372 }
2373 
onSensorEvent(uint32_t frameNumber,Event e,nsecs_t timestamp)2374 void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e,
2375                                         nsecs_t timestamp) {
2376   switch (e) {
2377     case Sensor::SensorListener::EXPOSURE_START: {
2378       ALOGVV("%s: Frame %d: Sensor started exposure at %lld", __FUNCTION__,
2379              frameNumber, timestamp);
2380       // Trigger shutter notify to framework
2381       camera3_notify_msg_t msg;
2382       msg.type = CAMERA3_MSG_SHUTTER;
2383       msg.message.shutter.frame_number = frameNumber;
2384       msg.message.shutter.timestamp = timestamp;
2385       sendNotify(&msg);
2386       break;
2387     }
2388     default:
2389       ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__, e,
2390             timestamp);
2391       break;
2392   }
2393 }
2394 
// Readout thread constructor: holds a non-owning pointer back to the
// parent camera; starts with no JPEG compression in flight.
EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent)
    : mParent(parent), mJpegWaiting(false) {}
2397 
~ReadoutThread()2398 EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() {
2399   for (List<Request>::iterator i = mInFlightQueue.begin();
2400        i != mInFlightQueue.end(); i++) {
2401     delete i->buffers;
2402     delete i->sensorBuffers;
2403   }
2404 }
2405 
queueCaptureRequest(const Request & r)2406 void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
2407   Mutex::Autolock l(mLock);
2408 
2409   mInFlightQueue.push_back(r);
2410   mInFlightSignal.signal();
2411 }
2412 
isIdle()2413 bool EmulatedFakeCamera3::ReadoutThread::isIdle() {
2414   Mutex::Autolock l(mLock);
2415   return mInFlightQueue.empty() && !mThreadActive;
2416 }
2417 
waitForReadout()2418 status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() {
2419   status_t res;
2420   Mutex::Autolock l(mLock);
2421   int loopCount = 0;
2422   while (mInFlightQueue.size() >= kMaxQueueSize) {
2423     res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2424     if (res != OK && res != TIMED_OUT) {
2425       ALOGE("%s: Error waiting for in-flight queue to shrink", __FUNCTION__);
2426       return INVALID_OPERATION;
2427     }
2428     if (loopCount == kMaxWaitLoops) {
2429       ALOGE("%s: Timed out waiting for in-flight queue to shrink",
2430             __FUNCTION__);
2431       return TIMED_OUT;
2432     }
2433     loopCount++;
2434   }
2435   return OK;
2436 }
2437 
threadLoop()2438 bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
2439   status_t res;
2440 
2441   ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);
2442 
2443   // First wait for a request from the in-flight queue
2444 
2445   if (mCurrentRequest.settings.isEmpty()) {
2446     Mutex::Autolock l(mLock);
2447     if (mInFlightQueue.empty()) {
2448       res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2449       if (res == TIMED_OUT) {
2450         ALOGVV("%s: ReadoutThread: Timed out waiting for request",
2451                __FUNCTION__);
2452         return true;
2453       } else if (res != NO_ERROR) {
2454         ALOGE("%s: Error waiting for capture requests: %d", __FUNCTION__, res);
2455         return false;
2456       }
2457     }
2458     mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
2459     mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
2460     mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
2461     mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
2462     mInFlightQueue.erase(mInFlightQueue.begin());
2463     mInFlightSignal.signal();
2464     mThreadActive = true;
2465     ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
2466            mCurrentRequest.frameNumber);
2467   }
2468 
2469   // Then wait for it to be delivered from the sensor
2470   ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
2471          __FUNCTION__);
2472 
2473   nsecs_t captureTime;
2474   bool gotFrame = mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
2475   if (!gotFrame) {
2476     ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
2477            __FUNCTION__);
2478     return true;
2479   }
2480 
2481   ALOGVV("Sensor done with readout for frame %d, captured at %lld ",
2482          mCurrentRequest.frameNumber, captureTime);
2483 
2484   // Check if we need to JPEG encode a buffer, and send it for async
2485   // compression if so. Otherwise prepare the buffer for return.
2486   bool needJpeg = false;
2487   HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
2488   while (buf != mCurrentRequest.buffers->end()) {
2489     bool goodBuffer = true;
2490     if (buf->stream->format == HAL_PIXEL_FORMAT_BLOB &&
2491         buf->stream->data_space != HAL_DATASPACE_DEPTH) {
2492       Mutex::Autolock jl(mJpegLock);
2493       if (mJpegWaiting) {
2494         // This shouldn't happen, because processCaptureRequest should
2495         // be stalling until JPEG compressor is free.
2496         ALOGE("%s: Already processing a JPEG!", __FUNCTION__);
2497         goodBuffer = false;
2498       }
2499       if (goodBuffer) {
2500         // Compressor takes ownership of sensorBuffers here
2501         res = mParent->mJpegCompressor->start(mCurrentRequest.sensorBuffers,
2502                                               this);
2503         goodBuffer = (res == OK);
2504       }
2505       if (goodBuffer) {
2506         needJpeg = true;
2507 
2508         mJpegHalBuffer = *buf;
2509         mJpegFrameNumber = mCurrentRequest.frameNumber;
2510         mJpegWaiting = true;
2511 
2512         mCurrentRequest.sensorBuffers = NULL;
2513         buf = mCurrentRequest.buffers->erase(buf);
2514 
2515         continue;
2516       }
2517       ALOGE("%s: Error compressing output buffer: %s (%d)", __FUNCTION__,
2518             strerror(-res), res);
2519       // fallthrough for cleanup
2520     }
2521     GrallocModule::getInstance().unlock(*(buf->buffer));
2522 
2523     buf->status =
2524         goodBuffer ? CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
2525     buf->acquire_fence = -1;
2526     buf->release_fence = -1;
2527 
2528     ++buf;
2529   }  // end while
2530 
2531   // Construct result for all completed buffers and results
2532 
2533   camera3_capture_result result;
2534 
2535   if (mParent->hasCapability(BACKWARD_COMPATIBLE)) {
2536     static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
2537     mCurrentRequest.settings.update(ANDROID_STATISTICS_SCENE_FLICKER,
2538                                     &sceneFlicker, 1);
2539 
2540     static const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
2541     mCurrentRequest.settings.update(ANDROID_FLASH_STATE, &flashState, 1);
2542 
2543     nsecs_t rollingShutterSkew = Sensor::kFrameDurationRange[0];
2544     mCurrentRequest.settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
2545                                     &rollingShutterSkew, 1);
2546 
2547     float focusRange[] = {1.0f / 5.0f, 0};  // 5 m to infinity in focus
2548     mCurrentRequest.settings.update(ANDROID_LENS_FOCUS_RANGE, focusRange,
2549                                     sizeof(focusRange) / sizeof(float));
2550   }
2551 
2552   if (mParent->hasCapability(DEPTH_OUTPUT)) {
2553     camera_metadata_entry_t entry;
2554 
2555     find_camera_metadata_entry(mParent->mCameraInfo,
2556                                ANDROID_LENS_POSE_TRANSLATION, &entry);
2557     mCurrentRequest.settings.update(ANDROID_LENS_POSE_TRANSLATION, entry.data.f,
2558                                     entry.count);
2559 
2560     find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_POSE_ROTATION,
2561                                &entry);
2562     mCurrentRequest.settings.update(ANDROID_LENS_POSE_ROTATION, entry.data.f,
2563                                     entry.count);
2564 
2565     find_camera_metadata_entry(mParent->mCameraInfo,
2566                                ANDROID_LENS_INTRINSIC_CALIBRATION, &entry);
2567     mCurrentRequest.settings.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
2568                                     entry.data.f, entry.count);
2569 
2570     find_camera_metadata_entry(mParent->mCameraInfo,
2571                                ANDROID_LENS_RADIAL_DISTORTION, &entry);
2572     mCurrentRequest.settings.update(ANDROID_LENS_RADIAL_DISTORTION,
2573                                     entry.data.f, entry.count);
2574   }
2575 
2576   mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP, &captureTime, 1);
2577 
2578   // JPEGs take a stage longer
2579   const uint8_t pipelineDepth =
2580       needJpeg ? kMaxBufferCount : kMaxBufferCount - 1;
2581   mCurrentRequest.settings.update(ANDROID_REQUEST_PIPELINE_DEPTH,
2582                                   &pipelineDepth, 1);
2583 
2584   result.frame_number = mCurrentRequest.frameNumber;
2585   result.result = mCurrentRequest.settings.getAndLock();
2586   result.num_output_buffers = mCurrentRequest.buffers->size();
2587   result.output_buffers = mCurrentRequest.buffers->array();
2588   result.input_buffer = nullptr;
2589   result.partial_result = 1;
2590 
2591   // Go idle if queue is empty, before sending result
2592   bool signalIdle = false;
2593   {
2594     Mutex::Autolock l(mLock);
2595     if (mInFlightQueue.empty()) {
2596       mThreadActive = false;
2597       signalIdle = true;
2598     }
2599   }
2600   if (signalIdle) mParent->signalReadoutIdle();
2601 
2602   // Send it off to the framework
2603   ALOGVV("%s: ReadoutThread: Send result to framework", __FUNCTION__);
2604   mParent->sendCaptureResult(&result);
2605 
2606   // Clean up
2607   mCurrentRequest.settings.unlock(result.result);
2608 
2609   delete mCurrentRequest.buffers;
2610   mCurrentRequest.buffers = NULL;
2611   if (!needJpeg) {
2612     delete mCurrentRequest.sensorBuffers;
2613     mCurrentRequest.sensorBuffers = NULL;
2614   }
2615   mCurrentRequest.settings.clear();
2616 
2617   return true;
2618 }
2619 
onJpegDone(const StreamBuffer & jpegBuffer,bool success)2620 void EmulatedFakeCamera3::ReadoutThread::onJpegDone(
2621     const StreamBuffer &jpegBuffer, bool success) {
2622   Mutex::Autolock jl(mJpegLock);
2623 
2624   GrallocModule::getInstance().unlock(*(jpegBuffer.buffer));
2625 
2626   mJpegHalBuffer.status =
2627       success ? CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
2628   mJpegHalBuffer.acquire_fence = -1;
2629   mJpegHalBuffer.release_fence = -1;
2630   mJpegWaiting = false;
2631 
2632   camera3_capture_result result;
2633 
2634   result.frame_number = mJpegFrameNumber;
2635   result.result = NULL;
2636   result.num_output_buffers = 1;
2637   result.output_buffers = &mJpegHalBuffer;
2638   result.input_buffer = nullptr;
2639   result.partial_result = 0;
2640 
2641   if (!success) {
2642     ALOGE(
2643         "%s: Compression failure, returning error state buffer to"
2644         " framework",
2645         __FUNCTION__);
2646   } else {
2647     ALOGV("%s: Compression complete, returning buffer to framework",
2648           __FUNCTION__);
2649   }
2650 
2651   mParent->sendCaptureResult(&result);
2652 }
2653 
// Callback from the JPEG compressor when it releases a reprocess input
// buffer. This fake camera never hands an input buffer to the compressor,
// so reaching this is a logic error worth logging loudly.
void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone(
    const StreamBuffer & /*inputBuffer*/) {
  // Should never get here, since the input buffer has to be returned
  // by end of processCaptureRequest
  ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
}
2660 
2661 };  // namespace android
2662