1 /*
2  * Copyright (C) 2012 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 //#define LOG_NDEBUG 0
18 //#define LOG_NNDEBUG 0
19 #include "system/graphics-base-v1.1.h"
20 #define LOG_TAG "EmulatedSensor"
21 #define ATRACE_TAG ATRACE_TAG_CAMERA
22 
23 #ifdef LOG_NNDEBUG
24 #define ALOGVV(...) ALOGV(__VA_ARGS__)
25 #else
26 #define ALOGVV(...) ((void)0)
27 #endif
28 
29 #include <android/hardware/graphics/common/1.2/types.h>
30 #include <cutils/properties.h>
31 #include <inttypes.h>
32 #include <libyuv.h>
33 #include <memory.h>
34 #include <system/camera_metadata.h>
35 #include <utils/Log.h>
36 #include <utils/Trace.h>
37 
38 #include <cmath>
39 #include <cstdlib>
40 
41 #include "EmulatedSensor.h"
42 #include "utils/ExifUtils.h"
43 #include "utils/HWLUtils.h"
44 
45 namespace android {
46 
47 using android::google_camera_hal::ErrorCode;
48 using google_camera_hal::HalCameraMetadata;
49 using google_camera_hal::MessageType;
50 using google_camera_hal::NotifyMessage;
51 
52 using android::hardware::graphics::common::V1_2::Dataspace;
53 
54 const uint32_t EmulatedSensor::kRegularSceneHandshake = 1; // Scene handshake divider
55 const uint32_t EmulatedSensor::kReducedSceneHandshake = 2; // Scene handshake divider
56 
57 // 1 us - 30 sec
58 const nsecs_t EmulatedSensor::kSupportedExposureTimeRange[2] = {1000LL,
59                                                                 30000000000LL};
60 
61 // ~1/30 s - 30 sec
62 const nsecs_t EmulatedSensor::kSupportedFrameDurationRange[2] = {33331760LL,
63                                                                  30000000000LL};
64 
65 const int32_t EmulatedSensor::kSupportedSensitivityRange[2] = {100, 1600};
66 const int32_t EmulatedSensor::kDefaultSensitivity = 100;  // ISO
67 const nsecs_t EmulatedSensor::kDefaultExposureTime = ms2ns(15);
68 const nsecs_t EmulatedSensor::kDefaultFrameDuration = ms2ns(33);
69 // Deadline within which we should return the results as soon as possible to
70 // avoid skewing the frame cycle due to external delays.
71 const nsecs_t EmulatedSensor::kReturnResultThreshod = 3 * kDefaultFrameDuration;
72 
73 // Sensor defaults
74 const uint8_t EmulatedSensor::kSupportedColorFilterArrangement =
75     ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
76 const uint32_t EmulatedSensor::kDefaultMaxRawValue = 4000;
77 const uint32_t EmulatedSensor::kDefaultBlackLevelPattern[4] = {1000, 1000, 1000,
78                                                                1000};
79 
80 const nsecs_t EmulatedSensor::kMinVerticalBlank = 10000L;
81 
82 // Sensor sensitivity
83 const float EmulatedSensor::kSaturationVoltage = 0.520f;
84 const uint32_t EmulatedSensor::kSaturationElectrons = 2000;
85 const float EmulatedSensor::kVoltsPerLuxSecond = 0.100f;
86 
87 const float EmulatedSensor::kElectronsPerLuxSecond =
88     EmulatedSensor::kSaturationElectrons / EmulatedSensor::kSaturationVoltage *
89     EmulatedSensor::kVoltsPerLuxSecond;
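// With the constants above this works out to about
// 2000 / 0.520 * 0.100 = 384.6 electrons per lux-second.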
90 
91 const float EmulatedSensor::kReadNoiseStddevBeforeGain = 1.177;  // in electrons
92 const float EmulatedSensor::kReadNoiseStddevAfterGain =
93     2.100;  // in digital counts
94 const float EmulatedSensor::kReadNoiseVarBeforeGain =
95     EmulatedSensor::kReadNoiseStddevBeforeGain *
96     EmulatedSensor::kReadNoiseStddevBeforeGain;
97 const float EmulatedSensor::kReadNoiseVarAfterGain =
98     EmulatedSensor::kReadNoiseStddevAfterGain *
99     EmulatedSensor::kReadNoiseStddevAfterGain;
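// Numerically: 1.177^2 ~= 1.385 (electrons^2) and 2.100^2 = 4.41 (counts^2).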
100 
101 const uint32_t EmulatedSensor::kMaxRAWStreams = 1;
102 const uint32_t EmulatedSensor::kMaxProcessedStreams = 3;
103 const uint32_t EmulatedSensor::kMaxStallingStreams = 2;
104 const uint32_t EmulatedSensor::kMaxInputStreams = 1;
105 
106 const uint32_t EmulatedSensor::kMaxLensShadingMapSize[2]{64, 64};
107 const int32_t EmulatedSensor::kFixedBitPrecision = 64;  // 6-bit
108 // In fixed-point math, saturation point of sensor after gain
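// (64 * 255 = 16320 with the default precision above.)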
109 const int32_t EmulatedSensor::kSaturationPoint = kFixedBitPrecision * 255;
110 const camera_metadata_rational EmulatedSensor::kNeutralColorPoint[3] = {
111     {255, 1}, {255, 1}, {255, 1}};
112 const float EmulatedSensor::kGreenSplit = 1.f;  // No divergence
113 // Reduce memory usage by allowing only one buffer in sensor, one in jpeg
114 // compressor and one pending request to avoid stalls.
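// (1 sensor buffer + 1 jpeg compressor buffer + 1 pending request = depth of 3.)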
115 const uint8_t EmulatedSensor::kPipelineDepth = 3;
116 
117 const camera_metadata_rational EmulatedSensor::kDefaultColorTransform[9] = {
118     {1, 1}, {0, 1}, {0, 1}, {0, 1}, {1, 1}, {0, 1}, {0, 1}, {0, 1}, {1, 1}};
119 const float EmulatedSensor::kDefaultColorCorrectionGains[4] = {1.0f, 1.0f, 1.0f,
120                                                                1.0f};
121 
122 const float EmulatedSensor::kDefaultToneMapCurveRed[4] = {.0f, .0f, 1.f, 1.f};
123 const float EmulatedSensor::kDefaultToneMapCurveGreen[4] = {.0f, .0f, 1.f, 1.f};
124 const float EmulatedSensor::kDefaultToneMapCurveBlue[4] = {.0f, .0f, 1.f, 1.f};
125 
126 /** A few utility functions for math, normal distributions */
127 
128 // Take advantage of IEEE floating-point format to calculate an approximate
129 // square root. Accurate to within +-3.6%
130 float sqrtf_approx(float r) {
131   // Modifier is based on IEEE floating-point representation; the
132   // manipulations boil down to finding approximate log2, dividing by two, and
133   // then inverting the log2. A bias is added to make the relative error
134   // symmetric about the real answer.
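  // Worked example (approximate): for r = 4.0f the bit pattern is 0x40800000;
  // (0x40800000 >> 1) + 0x1FBB4000 = 0x3FFB4000, which is roughly 1.96f,
  // i.e. within about 2% of the exact square root of 2.0.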
135   const int32_t modifier = 0x1FBB4000;
136 
137   int32_t r_i = *(int32_t*)(&r);
138   r_i = (r_i >> 1) + modifier;
139 
140   return *(float*)(&r_i);
141 }
142 
143 EmulatedSensor::EmulatedSensor() : Thread(false), got_vsync_(false) {
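  // Precompute the tone-mapping lookup table once: it maps fixed-point linear
  // values in [0, kSaturationPoint] to gamma-encoded values, so per-pixel
  // processing becomes a table lookup. ApplysRGBGamma is expected to apply the
  // standard sRGB transfer curve.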
144   gamma_table_.resize(kSaturationPoint + 1);
145   for (int32_t i = 0; i <= kSaturationPoint; i++) {
146     gamma_table_[i] = ApplysRGBGamma(i, kSaturationPoint);
147   }
148 }
149 
150 EmulatedSensor::~EmulatedSensor() {
151   ShutDown();
152 }
153 
154 bool EmulatedSensor::AreCharacteristicsSupported(
155     const SensorCharacteristics& characteristics) {
156   if ((characteristics.width == 0) || (characteristics.height == 0)) {
157     ALOGE("%s: Invalid sensor size %zux%zu", __FUNCTION__,
158           characteristics.width, characteristics.height);
159     return false;
160   }
161 
162   if ((characteristics.full_res_width == 0) ||
163       (characteristics.full_res_height == 0)) {
164     ALOGE("%s: Invalid sensor full res size %zux%zu", __FUNCTION__,
165           characteristics.full_res_width, characteristics.full_res_height);
166     return false;
167   }
168 
169   if (characteristics.is_10bit_dynamic_range_capable) {
170     // We support only HLG10 at the moment
171     const auto& hlg10_entry = characteristics.dynamic_range_profiles.find(
172         ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10);
173     if ((characteristics.dynamic_range_profiles.size() != 1) ||
174         (hlg10_entry == characteristics.dynamic_range_profiles.end())) {
175       ALOGE("%s: Only support for HLG10 is available!", __FUNCTION__);
176       return false;
177     }
178   }
179 
180   if ((characteristics.exposure_time_range[0] >=
181        characteristics.exposure_time_range[1]) ||
182       ((characteristics.exposure_time_range[0] < kSupportedExposureTimeRange[0]) ||
183        (characteristics.exposure_time_range[1] >
184         kSupportedExposureTimeRange[1]))) {
185     ALOGE("%s: Unsupported exposure range", __FUNCTION__);
186     return false;
187   }
188 
189   if ((characteristics.frame_duration_range[0] >=
190        characteristics.frame_duration_range[1]) ||
191       ((characteristics.frame_duration_range[0] <
192         kSupportedFrameDurationRange[0]) ||
193        (characteristics.frame_duration_range[1] >
194         kSupportedFrameDurationRange[1]))) {
195     ALOGE("%s: Unsupported frame duration range", __FUNCTION__);
196     return false;
197   }
198 
199   if ((characteristics.sensitivity_range[0] >=
200        characteristics.sensitivity_range[1]) ||
201       ((characteristics.sensitivity_range[0] < kSupportedSensitivityRange[0]) ||
202        (characteristics.sensitivity_range[1] > kSupportedSensitivityRange[1])) ||
203       (!((kDefaultSensitivity >= characteristics.sensitivity_range[0]) &&
204          (kDefaultSensitivity <= characteristics.sensitivity_range[1])))) {
205     ALOGE("%s: Unsupported sensitivity range", __FUNCTION__);
206     return false;
207   }
208 
209   if (characteristics.color_arangement != kSupportedColorFilterArrangement) {
210     ALOGE("%s: Unsupported color arrangement!", __FUNCTION__);
211     return false;
212   }
213 
214   for (const auto& blackLevel : characteristics.black_level_pattern) {
215     if (blackLevel >= characteristics.max_raw_value) {
216       ALOGE("%s: Black level matches or exceeds max RAW value!", __FUNCTION__);
217       return false;
218     }
219   }
220 
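  // The minimum frame duration divided by the sensor height is used as an
  // integer per-row readout time elsewhere in the emulation, so it must not
  // truncate to zero.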
221   if ((characteristics.frame_duration_range[0] / characteristics.height) == 0) {
222     ALOGE("%s: Zero row readout time!", __FUNCTION__);
223     return false;
224   }
225 
226   if (characteristics.max_raw_streams > kMaxRAWStreams) {
227     ALOGE("%s: RAW streams maximum %u exceeds supported maximum %u",
228           __FUNCTION__, characteristics.max_raw_streams, kMaxRAWStreams);
229     return false;
230   }
231 
232   if (characteristics.max_processed_streams > kMaxProcessedStreams) {
233     ALOGE("%s: Processed streams maximum %u exceeds supported maximum %u",
234           __FUNCTION__, characteristics.max_processed_streams,
235           kMaxProcessedStreams);
236     return false;
237   }
238 
239   if (characteristics.max_stalling_streams > kMaxStallingStreams) {
240     ALOGE("%s: Stalling streams maximum %u exceeds supported maximum %u",
241           __FUNCTION__, characteristics.max_stalling_streams,
242           kMaxStallingStreams);
243     return false;
244   }
245 
246   if (characteristics.max_input_streams > kMaxInputStreams) {
247     ALOGE("%s: Input streams maximum %u exceeds supported maximum %u",
248           __FUNCTION__, characteristics.max_input_streams, kMaxInputStreams);
249     return false;
250   }
251 
252   if ((characteristics.lens_shading_map_size[0] > kMaxLensShadingMapSize[0]) ||
253       (characteristics.lens_shading_map_size[1] > kMaxLensShadingMapSize[1])) {
254     ALOGE("%s: Lens shading map [%dx%d] exceeds supported maximum [%dx%d]",
255           __FUNCTION__, characteristics.lens_shading_map_size[0],
256           characteristics.lens_shading_map_size[1], kMaxLensShadingMapSize[0],
257           kMaxLensShadingMapSize[1]);
258     return false;
259   }
260 
261   if (characteristics.max_pipeline_depth < kPipelineDepth) {
262     ALOGE("%s: Pipeline depth %d smaller than supported minimum %d",
263           __FUNCTION__, characteristics.max_pipeline_depth, kPipelineDepth);
264     return false;
265   }
266 
267   return true;
268 }
269 
270 static void SplitStreamCombination(
271     const StreamConfiguration& original_config,
272     StreamConfiguration* default_mode_config,
273     StreamConfiguration* max_resolution_mode_config,
274     StreamConfiguration* input_stream_config) {
275   // Go through the streams
276   if (default_mode_config == nullptr || max_resolution_mode_config == nullptr ||
277       input_stream_config == nullptr) {
278     ALOGE("%s: Input stream / output stream configs are nullptr", __FUNCTION__);
279     return;
280   }
281   for (const auto& stream : original_config.streams) {
282     if (stream.stream_type == google_camera_hal::StreamType::kInput) {
283       input_stream_config->streams.push_back(stream);
284       continue;
285     }
286     if (stream.used_in_default_resolution_mode) {
287       default_mode_config->streams.push_back(stream);
288     }
289     if (stream.used_in_max_resolution_mode) {
290       max_resolution_mode_config->streams.push_back(stream);
291     }
292   }
293 }
294 
295 bool EmulatedSensor::IsStreamCombinationSupported(
296     uint32_t logical_id, const StreamConfiguration& config,
297     StreamConfigurationMap& default_config_map,
298     StreamConfigurationMap& max_resolution_config_map,
299     const PhysicalStreamConfigurationMap& physical_map,
300     const PhysicalStreamConfigurationMap& physical_map_max_resolution,
301     const LogicalCharacteristics& sensor_chars) {
302   StreamConfiguration default_mode_config, max_resolution_mode_config,
303       input_stream_config;
304   SplitStreamCombination(config, &default_mode_config,
305                          &max_resolution_mode_config, &input_stream_config);
306 
307   return IsStreamCombinationSupported(logical_id, default_mode_config,
308                                       default_config_map, physical_map,
309                                       sensor_chars) &&
310          IsStreamCombinationSupported(
311              logical_id, max_resolution_mode_config, max_resolution_config_map,
312              physical_map_max_resolution, sensor_chars, /*is_max_res*/ true) &&
313 
314          (IsStreamCombinationSupported(logical_id, input_stream_config,
315                                        default_config_map, physical_map,
316                                        sensor_chars) ||
317           IsStreamCombinationSupported(
318               logical_id, input_stream_config, max_resolution_config_map,
319               physical_map_max_resolution, sensor_chars, /*is_max_res*/ true));
320 }
321 
322 bool EmulatedSensor::IsStreamCombinationSupported(
323     uint32_t logical_id, const StreamConfiguration& config,
324     StreamConfigurationMap& config_map,
325     const PhysicalStreamConfigurationMap& physical_map,
326     const LogicalCharacteristics& sensor_chars, bool is_max_res) {
327   uint32_t input_stream_count = 0;
328   // Map from physical camera id to number of streams for that physical camera
329   std::map<uint32_t, uint32_t> raw_stream_count;
330   std::map<uint32_t, uint32_t> processed_stream_count;
331   std::map<uint32_t, uint32_t> stalling_stream_count;
332 
333   // Only allow the stream configurations specified in
334   // dynamicSizeStreamConfigurations.
335   for (const auto& stream : config.streams) {
336     bool is_dynamic_output =
337         (stream.is_physical_camera_stream && stream.group_id != -1);
338     if (stream.rotation != google_camera_hal::StreamRotation::kRotation0) {
339       ALOGE("%s: Stream rotation: 0x%x not supported!", __FUNCTION__,
340             stream.rotation);
341       return false;
342     }
343 
344     if (stream.stream_type == google_camera_hal::StreamType::kInput) {
345       if (sensor_chars.at(logical_id).max_input_streams == 0) {
346         ALOGE("%s: Input streams are not supported on this device!",
347               __FUNCTION__);
348         return false;
349       }
350 
351       auto const& supported_outputs =
352           config_map.GetValidOutputFormatsForInput(stream.format);
353       if (supported_outputs.empty()) {
354         ALOGE("%s: Input stream with format: 0x%x not supported on this device!",
355               __FUNCTION__, stream.format);
356         return false;
357       }
358 
359       input_stream_count++;
360     } else {
361       if (stream.is_physical_camera_stream &&
362           physical_map.find(stream.physical_camera_id) == physical_map.end()) {
363         ALOGE("%s: Invalid physical camera id %d", __FUNCTION__,
364               stream.physical_camera_id);
365         return false;
366       }
367 
368       if (is_dynamic_output) {
369         auto dynamic_physical_output_formats =
370             physical_map.at(stream.physical_camera_id)
371                 ->GetDynamicPhysicalStreamOutputFormats();
372         if (dynamic_physical_output_formats.find(stream.format) ==
373             dynamic_physical_output_formats.end()) {
374           ALOGE("%s: Unsupported physical stream format %d", __FUNCTION__,
375                 stream.format);
376           return false;
377         }
378       }
379 
380       if (stream.dynamic_profile !=
381           ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
382         const SensorCharacteristics& sensor_char =
383             stream.is_physical_camera_stream
384                 ? sensor_chars.at(stream.physical_camera_id)
385                 : sensor_chars.at(logical_id);
386         if (!sensor_char.is_10bit_dynamic_range_capable) {
387           ALOGE("%s: 10-bit dynamic range output not supported on this device!",
388                 __FUNCTION__);
389           return false;
390         }
391 
392         if ((stream.format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) &&
393             (static_cast<android_pixel_format_v1_1_t>(stream.format) !=
394              HAL_PIXEL_FORMAT_YCBCR_P010)) {
395           ALOGE(
396               "%s: 10-bit dynamic range profile 0x%x not supported on a non "
397               "10-bit output stream"
398               " pixel format 0x%x",
399               __FUNCTION__, stream.dynamic_profile, stream.format);
400           return false;
401         }
402 
403         if ((static_cast<android_pixel_format_v1_1_t>(stream.format) ==
404              HAL_PIXEL_FORMAT_YCBCR_P010) &&
405             ((stream.data_space !=
406               static_cast<android_dataspace_t>(Dataspace::BT2020_ITU_HLG)) &&
407              (stream.data_space !=
408               static_cast<android_dataspace_t>(Dataspace::BT2020_HLG)) &&
409              (stream.data_space !=
410               static_cast<android_dataspace_t>(Dataspace::UNKNOWN)))) {
411           ALOGE(
412               "%s: Unsupported stream data space 0x%x for 10-bit YUV "
413               "output",
414               __FUNCTION__, stream.data_space);
415           return false;
416         }
417       }
418 
419       switch (stream.format) {
420         case HAL_PIXEL_FORMAT_BLOB:
421           if ((stream.data_space != HAL_DATASPACE_V0_JFIF) &&
422               (stream.data_space != HAL_DATASPACE_UNKNOWN)) {
423             ALOGE("%s: Unsupported Blob dataspace 0x%x", __FUNCTION__,
424                   stream.data_space);
425             return false;
426           }
427           if (stream.is_physical_camera_stream) {
428             stalling_stream_count[stream.physical_camera_id]++;
429           } else {
430             for (const auto& p : physical_map) {
431               stalling_stream_count[p.first]++;
432             }
433           }
434           break;
435         case HAL_PIXEL_FORMAT_RAW16: {
436           const SensorCharacteristics& sensor_char =
437               stream.is_physical_camera_stream
438                   ? sensor_chars.at(stream.physical_camera_id)
439                   : sensor_chars.at(logical_id);
440           auto sensor_height =
441               is_max_res ? sensor_char.full_res_height : sensor_char.height;
442           auto sensor_width =
443               is_max_res ? sensor_char.full_res_width : sensor_char.width;
444           if (stream.height != sensor_height || stream.width != sensor_width) {
445             ALOGE(
446                 "%s: RAW16 buffer height %d and width %d must match sensor "
447                 "height: %zu"
448                 " and width: %zu",
449                 __FUNCTION__, stream.height, stream.width, sensor_height,
450                 sensor_width);
451             return false;
452           }
453           if (stream.is_physical_camera_stream) {
454             raw_stream_count[stream.physical_camera_id]++;
455           } else {
456             for (const auto& p : physical_map) {
457               raw_stream_count[p.first]++;
458             }
459           }
460         } break;
461         default:
462           if (stream.is_physical_camera_stream) {
463             processed_stream_count[stream.physical_camera_id]++;
464           } else {
465             for (const auto& p : physical_map) {
466               processed_stream_count[p.first]++;
467             }
468           }
469       }
470 
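      // Pick the size list to validate against, in priority order: dynamic
      // physical stream sizes for dynamic outputs, the physical camera's own
      // size list for physical streams, otherwise the logical config map.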
471       auto output_sizes =
472           is_dynamic_output
473               ? physical_map.at(stream.physical_camera_id)
474                     ->GetDynamicPhysicalStreamOutputSizes(stream.format)
475               : stream.is_physical_camera_stream
476                     ? physical_map.at(stream.physical_camera_id)
477                           ->GetOutputSizes(stream.format)
478                     : config_map.GetOutputSizes(stream.format);
479 
480       auto stream_size = std::make_pair(stream.width, stream.height);
481       if (output_sizes.find(stream_size) == output_sizes.end()) {
482         ALOGE("%s: Stream with size %dx%d and format 0x%x is not supported!",
483               __FUNCTION__, stream.width, stream.height, stream.format);
484         return false;
485       }
486     }
487 
488     if (!sensor_chars.at(logical_id).support_stream_use_case) {
489       if (stream.use_case != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
490         ALOGE("%s: Camera device doesn't support non-default stream use case!",
491               __FUNCTION__);
492         return false;
493       }
494     } else if (stream.use_case >
495                ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL) {
496       ALOGE("%s: Stream with use case %d is not supported!", __FUNCTION__,
497             stream.use_case);
498       return false;
499     } else if (stream.use_case !=
500                ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
501       if (stream.use_case ==
502               ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE) {
503         if (stream.format != HAL_PIXEL_FORMAT_YCBCR_420_888 &&
504             stream.format != HAL_PIXEL_FORMAT_BLOB) {
505           ALOGE("%s: Stream with use case %d isn't compatible with format %d",
506               __FUNCTION__, stream.use_case, stream.format);
507           return false;
508         }
509       } else if (stream.format != HAL_PIXEL_FORMAT_YCBCR_420_888 &&
510                  stream.format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
511         ALOGE("%s: Stream with use case %d isn't compatible with format %d",
512               __FUNCTION__, stream.use_case, stream.format);
513         return false;
514       }
515     }
516   }
517 
518   for (const auto& raw_count : raw_stream_count) {
519     unsigned int max_raw_streams =
520         sensor_chars.at(raw_count.first).max_raw_streams +
521         (is_max_res
522              ? 1
523              : 0);  // The extra raw stream is allowed for remosaic reprocessing.
524     if (raw_count.second > max_raw_streams) {
525       ALOGE("%s: RAW streams maximum %u exceeds supported maximum %u",
526             __FUNCTION__, raw_count.second, max_raw_streams);
527       return false;
528     }
529   }
530 
531   for (const auto& stalling_count : stalling_stream_count) {
532     if (stalling_count.second >
533         sensor_chars.at(stalling_count.first).max_stalling_streams) {
534       ALOGE("%s: Stalling streams maximum %u exceeds supported maximum %u",
535             __FUNCTION__, stalling_count.second,
536             sensor_chars.at(stalling_count.first).max_stalling_streams);
537       return false;
538     }
539   }
540 
541   for (const auto& processed_count : processed_stream_count) {
542     if (processed_count.second >
543         sensor_chars.at(processed_count.first).max_processed_streams) {
544       ALOGE("%s: Processed streams maximum %u exceeds supported maximum %u",
545             __FUNCTION__, processed_count.second,
546             sensor_chars.at(processed_count.first).max_processed_streams);
547       return false;
548     }
549   }
550 
551   if (input_stream_count > sensor_chars.at(logical_id).max_input_streams) {
552     ALOGE("%s: Input stream maximum %u exceeds supported maximum %u",
553           __FUNCTION__, input_stream_count,
554           sensor_chars.at(logical_id).max_input_streams);
555     return false;
556   }
557 
558   return true;
559 }
560 
561 status_t EmulatedSensor::StartUp(
562     uint32_t logical_camera_id,
563     std::unique_ptr<LogicalCharacteristics> logical_chars) {
564   if (isRunning()) {
565     return OK;
566   }
567 
568   if (logical_chars.get() == nullptr) {
569     return BAD_VALUE;
570   }
571 
572   chars_ = std::move(logical_chars);
573   auto device_chars = chars_->find(logical_camera_id);
574   if (device_chars == chars_->end()) {
575     ALOGE(
576         "%s: Logical camera id: %u absent from logical camera characteristics!",
577         __FUNCTION__, logical_camera_id);
578     return BAD_VALUE;
579   }
580 
581   for (const auto& it : *chars_) {
582     if (!AreCharacteristicsSupported(it.second)) {
583       ALOGE("%s: Sensor characteristics for camera id: %u not supported!",
584             __FUNCTION__, it.first);
585       return BAD_VALUE;
586     }
587   }
588 
589   logical_camera_id_ = logical_camera_id;
590   scene_ = std::make_unique<EmulatedScene>(
591       device_chars->second.full_res_width, device_chars->second.full_res_height,
592       kElectronsPerLuxSecond, device_chars->second.orientation,
593       device_chars->second.is_front_facing);
594   jpeg_compressor_ = std::make_unique<JpegCompressor>();
595 
596   auto res = run(LOG_TAG, ANDROID_PRIORITY_URGENT_DISPLAY);
597   if (res != OK) {
598     ALOGE("Unable to start up sensor capture thread: %d", res);
599   }
600 
601   return res;
602 }
603 
604 status_t EmulatedSensor::ShutDown() {
605   int res;
606   res = requestExitAndWait();
607   if (res != OK) {
608     ALOGE("Unable to shut down sensor capture thread: %d", res);
609   }
610   return res;
611 }
612 
613 void EmulatedSensor::SetCurrentRequest(
614     std::unique_ptr<LogicalCameraSettings> logical_settings,
615     std::unique_ptr<HwlPipelineResult> result,
616     std::unique_ptr<Buffers> input_buffers,
617     std::unique_ptr<Buffers> output_buffers) {
618   Mutex::Autolock lock(control_mutex_);
619   current_settings_ = std::move(logical_settings);
620   current_result_ = std::move(result);
621   current_input_buffers_ = std::move(input_buffers);
622   current_output_buffers_ = std::move(output_buffers);
623 }
624 
625 bool EmulatedSensor::WaitForVSyncLocked(nsecs_t reltime) {
626   got_vsync_ = false;
627   while (!got_vsync_) {
628     auto res = vsync_.waitRelative(control_mutex_, reltime);
629     if (res != OK && res != TIMED_OUT) {
630       ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
631       return false;
632     }
633   }
634 
635   return got_vsync_;
636 }
637 
638 bool EmulatedSensor::WaitForVSync(nsecs_t reltime) {
639   Mutex::Autolock lock(control_mutex_);
640 
641   return WaitForVSyncLocked(reltime);
642 }
643 
644 status_t EmulatedSensor::Flush() {
645   Mutex::Autolock lock(control_mutex_);
646   auto ret = WaitForVSyncLocked(kSupportedFrameDurationRange[1]);
647 
648   // First recreate the jpeg compressor. This will abort any ongoing processing
649   // and flush any pending jobs.
650   jpeg_compressor_ = std::make_unique<JpegCompressor>();
651 
652   // Then return any pending frames here
653   if ((current_input_buffers_.get() != nullptr) &&
654       (!current_input_buffers_->empty())) {
655     current_input_buffers_->clear();
656   }
657   if ((current_output_buffers_.get() != nullptr) &&
658       (!current_output_buffers_->empty())) {
659     for (const auto& buffer : *current_output_buffers_) {
660       buffer->stream_buffer.status = BufferStatus::kError;
661     }
662 
663     if ((current_result_.get() != nullptr) &&
664         (current_result_->result_metadata.get() != nullptr)) {
665       if (current_output_buffers_->at(0)->callback.notify != nullptr) {
666         NotifyMessage msg{
667             .type = MessageType::kError,
668             .message.error = {
669                 .frame_number = current_output_buffers_->at(0)->frame_number,
670                 .error_stream_id = -1,
671                 .error_code = ErrorCode::kErrorResult,
672             }};
673 
674         current_output_buffers_->at(0)->callback.notify(
675             current_result_->pipeline_id, msg);
676       }
677     }
678 
679     current_output_buffers_->clear();
680   }
681 
682   return ret ? OK : TIMED_OUT;
683 }
684 
685 bool EmulatedSensor::threadLoop() {
686   ATRACE_CALL();
687   /**
688    * Sensor capture operation main loop.
689    *
690    */
691 
692   /**
693    * Stage 1: Read in latest control parameters
694    */
695   std::unique_ptr<Buffers> next_buffers;
696   std::unique_ptr<Buffers> next_input_buffer;
697   std::unique_ptr<HwlPipelineResult> next_result;
698   std::unique_ptr<LogicalCameraSettings> settings;
699   HwlPipelineCallback callback = {nullptr, nullptr};
700   {
701     Mutex::Autolock lock(control_mutex_);
702     std::swap(settings, current_settings_);
703     std::swap(next_buffers, current_output_buffers_);
704     std::swap(next_input_buffer, current_input_buffers_);
705     std::swap(next_result, current_result_);
706 
707     // Signal VSync for start of readout
708     ALOGVV("Sensor VSync");
709     got_vsync_ = true;
710     vsync_.signal();
711   }
712 
713   auto frame_duration = EmulatedSensor::kSupportedFrameDurationRange[0];
714   auto exposure_time = EmulatedSensor::kSupportedExposureTimeRange[0];
715   // Frame duration must always be the same among all physical devices
716   if ((settings.get() != nullptr) && (!settings->empty())) {
717     frame_duration = settings->begin()->second.frame_duration;
718     exposure_time = settings->begin()->second.exposure_time;
719   }
720 
721   nsecs_t start_real_time = systemTime();
722   // Stagefright cares about system time for timestamps, so base simulated
723   // time on that.
724   nsecs_t frame_end_real_time = start_real_time + frame_duration;
725 
726   /**
727    * Stage 2: Capture new image
728    */
729   next_capture_time_ = frame_end_real_time;
730   next_readout_time_ = frame_end_real_time + exposure_time;
731 
732   sensor_binning_factor_info_.clear();
733 
734   bool reprocess_request = false;
735   if ((next_input_buffer.get() != nullptr) && (!next_input_buffer->empty())) {
736     if (next_input_buffer->size() > 1) {
737       ALOGW("%s: Reprocess supports only single input!", __FUNCTION__);
738     }
739 
740     camera_metadata_ro_entry_t entry;
741     auto ret =
742         next_result->result_metadata->Get(ANDROID_SENSOR_TIMESTAMP, &entry);
743     if ((ret == OK) && (entry.count == 1)) {
744       next_capture_time_ = entry.data.i64[0];
745     } else {
746       ALOGW("%s: Reprocess timestamp absent!", __FUNCTION__);
747     }
748 
749     ret = next_result->result_metadata->Get(ANDROID_SENSOR_EXPOSURE_TIME,
750                                             &entry);
751     if ((ret == OK) && (entry.count == 1)) {
752       next_readout_time_ = next_capture_time_ + entry.data.i64[0];
753     } else {
754       next_readout_time_ = next_capture_time_;
755     }
756 
757     reprocess_request = true;
758   }
759 
760   if ((next_buffers != nullptr) && (settings != nullptr)) {
761     callback = next_buffers->at(0)->callback;
762     if (callback.notify != nullptr) {
763       NotifyMessage msg{
764           .type = MessageType::kShutter,
765           .message.shutter = {
766               .frame_number = next_buffers->at(0)->frame_number,
767               .timestamp_ns = static_cast<uint64_t>(next_capture_time_),
768               .readout_timestamp_ns =
769                   static_cast<uint64_t>(next_readout_time_)}};
770       callback.notify(next_result->pipeline_id, msg);
771     }
772     auto b = next_buffers->begin();
773     while (b != next_buffers->end()) {
774       auto device_settings = settings->find((*b)->camera_id);
775       if (device_settings == settings->end()) {
776         ALOGE("%s: Sensor settings absent for device: %d", __func__,
777               (*b)->camera_id);
778         b = next_buffers->erase(b);
779         continue;
780       }
781 
782       auto device_chars = chars_->find((*b)->camera_id);
783       if (device_chars == chars_->end()) {
784         ALOGE("%s: Sensor characteristics absent for device: %d", __func__,
785               (*b)->camera_id);
786         b = next_buffers->erase(b);
787         continue;
788       }
789 
790       sensor_binning_factor_info_[(*b)->camera_id].quad_bayer_sensor =
791           device_chars->second.quad_bayer_sensor;
792 
793       ALOGVV("Starting next capture: Exposure: %" PRIu64 " ms, gain: %d",
794              ns2ms(device_settings->second.exposure_time),
795              device_settings->second.gain);
796 
797       scene_->Initialize(device_chars->second.full_res_width,
798                          device_chars->second.full_res_height,
799                          kElectronsPerLuxSecond);
800       scene_->SetExposureDuration((float)device_settings->second.exposure_time /
801                                   1e9);
802       scene_->SetColorFilterXYZ(device_chars->second.color_filter.rX,
803                                 device_chars->second.color_filter.rY,
804                                 device_chars->second.color_filter.rZ,
805                                 device_chars->second.color_filter.grX,
806                                 device_chars->second.color_filter.grY,
807                                 device_chars->second.color_filter.grZ,
808                                 device_chars->second.color_filter.gbX,
809                                 device_chars->second.color_filter.gbY,
810                                 device_chars->second.color_filter.gbZ,
811                                 device_chars->second.color_filter.bX,
812                                 device_chars->second.color_filter.bY,
813                                 device_chars->second.color_filter.bZ);
814       scene_->SetTestPattern(device_settings->second.test_pattern_mode ==
815                              ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);
816       scene_->SetTestPatternData(device_settings->second.test_pattern_data);
817       scene_->SetScreenRotation(device_settings->second.screen_rotation);
818 
819       uint32_t handshake_divider =
820           (device_settings->second.video_stab ==
821            ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON) ||
822                   (device_settings->second.video_stab ==
823                    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION)
824               ? kReducedSceneHandshake
825               : kRegularSceneHandshake;
826       scene_->CalculateScene(next_capture_time_, handshake_divider);
827 
828       (*b)->stream_buffer.status = BufferStatus::kOk;
829       bool max_res_mode = device_settings->second.sensor_pixel_mode;
830       sensor_binning_factor_info_[(*b)->camera_id].max_res_request =
831           max_res_mode;
832       switch ((*b)->format) {
833         case PixelFormat::RAW16:
834           sensor_binning_factor_info_[(*b)->camera_id].has_raw_stream = true;
835           break;
836         default:
837           sensor_binning_factor_info_[(*b)->camera_id].has_non_raw_stream = true;
838       }
839 
840       // TODO: remove hack. Implement RAW -> YUV / JPEG reprocessing http://b/192382904
841       bool treat_as_reprocess =
842           (device_chars->second.quad_bayer_sensor && reprocess_request &&
843            (*next_input_buffer->begin())->format == PixelFormat::RAW16)
844               ? false
845               : reprocess_request;
846 
847       switch ((*b)->format) {
848         case PixelFormat::RAW16:
849           if (!reprocess_request) {
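            // RAW16 uses 2 bytes per pixel, so the minimum buffer size is
            // 2 * width * height for the selected sensor mode.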
850             uint64_t min_full_res_raw_size =
851                 2 * device_chars->second.full_res_width *
852                 device_chars->second.full_res_height;
853             uint64_t min_default_raw_size =
854                 2 * device_chars->second.width * device_chars->second.height;
855             bool default_mode_for_qb =
856                 device_chars->second.quad_bayer_sensor && !max_res_mode;
857             size_t buffer_size = (*b)->plane.img.buffer_size;
858             if (default_mode_for_qb) {
859               if (buffer_size < min_default_raw_size) {
860                 ALOGE(
861                     "%s: Output buffer size too small for RAW capture in "
862                     "default "
863                     "mode, "
864                     "expected %" PRIu64 ", got %zu, for camera id %d",
865                     __FUNCTION__, min_default_raw_size, buffer_size,
866                     (*b)->camera_id);
867                 (*b)->stream_buffer.status = BufferStatus::kError;
868                 break;
869               }
870             } else if (buffer_size < min_full_res_raw_size) {
871               ALOGE(
872                   "%s: Output buffer size too small for RAW capture in max res "
873                   "mode, "
874                   "expected %" PRIu64 ", got %zu, for camera id %d",
875                   __FUNCTION__, min_full_res_raw_size, buffer_size,
876                   (*b)->camera_id);
877               (*b)->stream_buffer.status = BufferStatus::kError;
878               break;
879             }
880             if (default_mode_for_qb) {
881               CaptureRawBinned(
882                   (*b)->plane.img.img, (*b)->plane.img.stride_in_bytes,
883                   device_settings->second.gain, device_chars->second);
884             } else {
885               CaptureRawFullRes(
886                   (*b)->plane.img.img, (*b)->plane.img.stride_in_bytes,
887                   device_settings->second.gain, device_chars->second);
888             }
889           } else {
890             if (!device_chars->second.quad_bayer_sensor) {
891               ALOGE(
892                   "%s: Reprocess requests with output format %x not supported!",
893                   __FUNCTION__, (*b)->format);
894               (*b)->stream_buffer.status = BufferStatus::kError;
895               break;
896             }
897             // Remosaic the RAW input buffer
898             if ((*next_input_buffer->begin())->width != (*b)->width ||
899                 (*next_input_buffer->begin())->height != (*b)->height) {
900               ALOGE(
901                   "%s: RAW16 input dimensions %dx%d don't match output buffer "
902                   "dimensions %dx%d",
903                   __FUNCTION__, (*next_input_buffer->begin())->width,
904                   (*next_input_buffer->begin())->height, (*b)->width,
905                   (*b)->height);
906               (*b)->stream_buffer.status = BufferStatus::kError;
907               break;
908             }
909             ALOGV("%s remosaic Raw16 Image", __FUNCTION__);
910             RemosaicRAW16Image(
911                 (uint16_t*)(*next_input_buffer->begin())->plane.img.img,
912                 (uint16_t*)(*b)->plane.img.img, (*b)->plane.img.stride_in_bytes,
913                 device_chars->second);
914           }
915           break;
916         case PixelFormat::RGB_888:
917           if (!reprocess_request) {
918             CaptureRGB((*b)->plane.img.img, (*b)->width, (*b)->height,
919                        (*b)->plane.img.stride_in_bytes, RGBLayout::RGB,
920                        device_settings->second.gain, device_chars->second);
921           } else {
922             ALOGE("%s: Reprocess requests with output format %x not supported!",
923                   __FUNCTION__, (*b)->format);
924             (*b)->stream_buffer.status = BufferStatus::kError;
925           }
926           break;
927         case PixelFormat::RGBA_8888:
928           if (!reprocess_request) {
929             CaptureRGB((*b)->plane.img.img, (*b)->width, (*b)->height,
930                        (*b)->plane.img.stride_in_bytes, RGBLayout::RGBA,
931                        device_settings->second.gain, device_chars->second);
932           } else {
933             ALOGE("%s: Reprocess requests with output format %x not supported!",
934                   __FUNCTION__, (*b)->format);
935             (*b)->stream_buffer.status = BufferStatus::kError;
936           }
937           break;
938         case PixelFormat::BLOB:
939           if ((*b)->dataSpace == HAL_DATASPACE_V0_JFIF) {
940             YUV420Frame yuv_input{
941                 .width = treat_as_reprocess
942                              ? (*next_input_buffer->begin())->width
943                              : 0,
944                 .height = treat_as_reprocess
945                               ? (*next_input_buffer->begin())->height
946                               : 0,
947                 .planes = treat_as_reprocess
948                               ? (*next_input_buffer->begin())->plane.img_y_crcb
949                               : YCbCrPlanes{}};
950             auto jpeg_input = std::make_unique<JpegYUV420Input>();
951             jpeg_input->width = (*b)->width;
952             jpeg_input->height = (*b)->height;
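            // Allocate a contiguous I420 buffer: a full-resolution Y plane
            // followed by quarter-resolution Cb and Cr planes (3/2 bytes per
            // pixel in total).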
953             auto img =
954                 new uint8_t[(jpeg_input->width * jpeg_input->height * 3) / 2];
955             jpeg_input->yuv_planes = {
956                 .img_y = img,
957                 .img_cb = img + jpeg_input->width * jpeg_input->height,
958                 .img_cr = img + (jpeg_input->width * jpeg_input->height * 5) / 4,
959                 .y_stride = jpeg_input->width,
960                 .cbcr_stride = jpeg_input->width / 2,
961                 .cbcr_step = 1};
962             jpeg_input->buffer_owner = true;
963             YUV420Frame yuv_output{.width = jpeg_input->width,
964                                    .height = jpeg_input->height,
965                                    .planes = jpeg_input->yuv_planes};
966 
967             bool rotate =
968                 device_settings->second.rotate_and_crop == ANDROID_SCALER_ROTATE_AND_CROP_90;
969             ProcessType process_type =
970                 treat_as_reprocess ? REPROCESS
971                                    : (device_settings->second.edge_mode ==
972                                       ANDROID_EDGE_MODE_HIGH_QUALITY)
973                                          ? HIGH_QUALITY
974                                          : REGULAR;
975             auto ret = ProcessYUV420(
976                 yuv_input, yuv_output, device_settings->second.gain,
977                 process_type, device_settings->second.zoom_ratio,
978                 rotate, device_chars->second);
979             if (ret != 0) {
980               (*b)->stream_buffer.status = BufferStatus::kError;
981               break;
982             }
983 
984             auto jpeg_job = std::make_unique<JpegYUV420Job>();
985             jpeg_job->exif_utils = std::unique_ptr<ExifUtils>(
986                 ExifUtils::Create(device_chars->second));
987             jpeg_job->input = std::move(jpeg_input);
988             // If jpeg compression is successful, then the jpeg compressor
989             // must set the corresponding status.
990             (*b)->stream_buffer.status = BufferStatus::kError;
991             std::swap(jpeg_job->output, *b);
992             jpeg_job->result_metadata =
993                 HalCameraMetadata::Clone(next_result->result_metadata.get());
994 
995             Mutex::Autolock lock(control_mutex_);
996             jpeg_compressor_->QueueYUV420(std::move(jpeg_job));
997           } else {
998             ALOGE("%s: Format %x with dataspace %x is TODO", __FUNCTION__,
999                   (*b)->format, (*b)->dataSpace);
1000             (*b)->stream_buffer.status = BufferStatus::kError;
1001           }
1002           break;
1003         case PixelFormat::YCRCB_420_SP:
1004         case PixelFormat::YCBCR_420_888: {
1005           YUV420Frame yuv_input{
1006               .width =
1007                   treat_as_reprocess ? (*next_input_buffer->begin())->width : 0,
1008               .height =
1009                   treat_as_reprocess ? (*next_input_buffer->begin())->height : 0,
1010               .planes = treat_as_reprocess
1011                             ? (*next_input_buffer->begin())->plane.img_y_crcb
1012                             : YCbCrPlanes{}};
1013           YUV420Frame yuv_output{.width = (*b)->width,
1014                                  .height = (*b)->height,
1015                                  .planes = (*b)->plane.img_y_crcb};
1016           bool rotate =
1017               device_settings->second.rotate_and_crop == ANDROID_SCALER_ROTATE_AND_CROP_90;
1018           ProcessType process_type = treat_as_reprocess
1019                                          ? REPROCESS
1020                                          : (device_settings->second.edge_mode ==
1021                                             ANDROID_EDGE_MODE_HIGH_QUALITY)
1022                                                ? HIGH_QUALITY
1023                                                : REGULAR;
1024           auto ret = ProcessYUV420(
1025               yuv_input, yuv_output, device_settings->second.gain,
1026               process_type, device_settings->second.zoom_ratio,
1027               rotate, device_chars->second);
1028           if (ret != 0) {
1029             (*b)->stream_buffer.status = BufferStatus::kError;
1030           }
1031         } break;
1032         case PixelFormat::Y16:
1033           if (!reprocess_request) {
1034             if ((*b)->dataSpace == HAL_DATASPACE_DEPTH) {
1035               CaptureDepth((*b)->plane.img.img, device_settings->second.gain,
1036                            (*b)->width, (*b)->height,
1037                            (*b)->plane.img.stride_in_bytes,
1038                            device_chars->second);
1039             } else {
1040               ALOGE("%s: Format %x with dataspace %x is TODO", __FUNCTION__,
1041                     (*b)->format, (*b)->dataSpace);
1042               (*b)->stream_buffer.status = BufferStatus::kError;
1043             }
1044           } else {
1045             ALOGE("%s: Reprocess requests with output format %x not supported!",
1046                   __FUNCTION__, (*b)->format);
1047             (*b)->stream_buffer.status = BufferStatus::kError;
1048           }
1049           break;
1050         case PixelFormat::YCBCR_P010:
1051           if (!reprocess_request) {
1052             bool rotate = device_settings->second.rotate_and_crop ==
1053                           ANDROID_SCALER_ROTATE_AND_CROP_90;
1054             CaptureYUV420((*b)->plane.img_y_crcb, (*b)->width, (*b)->height,
1055                           device_settings->second.gain,
1056                           device_settings->second.zoom_ratio, rotate,
1057                           device_chars->second);
1058           } else {
1059             ALOGE(
1060                 "%s: Reprocess requests with output format %x not supported!",
1061                 __FUNCTION__, (*b)->format);
1062             (*b)->stream_buffer.status = BufferStatus::kError;
1063           }
1064           break;
1065         default:
1066           ALOGE("%s: Unknown format %x, no output", __FUNCTION__, (*b)->format);
1067           (*b)->stream_buffer.status = BufferStatus::kError;
1068           break;
1069       }
1070 
1071       b = next_buffers->erase(b);
1072     }
1073   }
1074 
1075   if (reprocess_request) {
1076     auto input_buffer = next_input_buffer->begin();
1077     while (input_buffer != next_input_buffer->end()) {
1078       (*input_buffer++)->stream_buffer.status = BufferStatus::kOk;
1079     }
1080     next_input_buffer->clear();
1081   }
1082 
1083   nsecs_t work_done_real_time = systemTime();
1084   // Returning the results at this point is not entirely correct from timing
1085   // perspective. Under ideal conditions where 'ReturnResults' completes
1086   // in less than 'time_accuracy' we need to return the results after the
1087   // frame cycle expires. However under real conditions various system
1088   // components like SurfaceFlinger, Encoder, LMK etc. could be consuming most
1089   // of the resources and the duration of "ReturnResults" can get comparable to
1090   // 'kDefaultFrameDuration'. This will skew the frame cycle and can result in
1091   // potential frame drops. To avoid this scenario when we are running under
1092   // tight deadlines (less than 'kReturnResultThreshod') try to return the
1093   // results immediately. In all other cases with more relaxed deadlines
1094   // the occasional bump during 'ReturnResults' should not have any
1095   // noticeable effect.
1096   if ((work_done_real_time + kReturnResultThreshod) > frame_end_real_time) {
1097     ReturnResults(callback, std::move(settings), std::move(next_result),
1098                   reprocess_request);
1099   }
1100 
1101   work_done_real_time = systemTime();
1102   ALOGVV("Sensor vertical blanking interval");
1103   const nsecs_t time_accuracy = 2e6;  // 2 ms of imprecision is ok
1104   if (work_done_real_time < frame_end_real_time - time_accuracy) {
1105     timespec t;
1106     t.tv_sec = (frame_end_real_time - work_done_real_time) / 1000000000L;
1107     t.tv_nsec = (frame_end_real_time - work_done_real_time) % 1000000000L;
1108 
1109     int ret;
1110     do {
1111       ret = nanosleep(&t, &t);
1112     } while (ret != 0);
1113   }
1114 
1115   ReturnResults(callback, std::move(settings), std::move(next_result),
1116                 reprocess_request);
1117 
1118   return true;
1119 };
1120 
1121 void EmulatedSensor::ReturnResults(
1122     HwlPipelineCallback callback,
1123     std::unique_ptr<LogicalCameraSettings> settings,
1124     std::unique_ptr<HwlPipelineResult> result, bool reprocess_request) {
1125   if ((callback.process_pipeline_result != nullptr) &&
1126       (result.get() != nullptr) && (result->result_metadata.get() != nullptr)) {
1127     auto logical_settings = settings->find(logical_camera_id_);
1128     if (logical_settings == settings->end()) {
1129       ALOGE("%s: Logical camera id: %u not found in settings!", __FUNCTION__,
1130             logical_camera_id_);
1131       return;
1132     }
1133     auto device_chars = chars_->find(logical_camera_id_);
1134     if (device_chars == chars_->end()) {
1135       ALOGE("%s: Sensor characteristics absent for device: %d", __func__,
1136             logical_camera_id_);
1137       return;
1138     }
1139     result->result_metadata->Set(ANDROID_SENSOR_TIMESTAMP, &next_capture_time_,
1140                                  1);
1141     uint8_t raw_binned_factor_used = false;
1142     if (sensor_binning_factor_info_.find(logical_camera_id_) !=
1143         sensor_binning_factor_info_.end()) {
1144       auto& info = sensor_binning_factor_info_[logical_camera_id_];
1145       // Logical stream was included in the request
1146       if (!reprocess_request && info.quad_bayer_sensor && info.max_res_request &&
1147           info.has_raw_stream && !info.has_non_raw_stream) {
1148         raw_binned_factor_used = true;
1149       }
1150       result->result_metadata->Set(ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
1151                                    &raw_binned_factor_used, 1);
1152     }
1153     if (logical_settings->second.lens_shading_map_mode ==
1154         ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON) {
1155       if ((device_chars->second.lens_shading_map_size[0] > 0) &&
1156           (device_chars->second.lens_shading_map_size[1] > 0)) {
1157         // Perfect lens, no actual shading needed.
1158         std::vector<float> lens_shading_map(
1159             device_chars->second.lens_shading_map_size[0] *
1160                 device_chars->second.lens_shading_map_size[1] * 4,
1161             1.f);
1162 
1163         result->result_metadata->Set(ANDROID_STATISTICS_LENS_SHADING_MAP,
1164                                      lens_shading_map.data(),
1165                                      lens_shading_map.size());
1166       }
1167     }
1168     if (logical_settings->second.report_video_stab) {
1169       result->result_metadata->Set(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
1170                                    &logical_settings->second.video_stab, 1);
1171     }
1172     if (logical_settings->second.report_edge_mode) {
1173       result->result_metadata->Set(ANDROID_EDGE_MODE,
1174                                    &logical_settings->second.edge_mode, 1);
1175     }
1176     if (logical_settings->second.report_neutral_color_point) {
1177       result->result_metadata->Set(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
1178                                    kNeutralColorPoint,
1179                                    ARRAY_SIZE(kNeutralColorPoint));
1180     }
1181     if (logical_settings->second.report_green_split) {
1182       result->result_metadata->Set(ANDROID_SENSOR_GREEN_SPLIT, &kGreenSplit, 1);
1183     }
1184     if (logical_settings->second.report_noise_profile) {
1185       CalculateAndAppendNoiseProfile(
1186           logical_settings->second.gain,
1187           GetBaseGainFactor(device_chars->second.max_raw_value),
1188           result->result_metadata.get());
1189     }
1190     if (logical_settings->second.report_rotate_and_crop) {
1191       result->result_metadata->Set(ANDROID_SCALER_ROTATE_AND_CROP,
1192           &logical_settings->second.rotate_and_crop, 1);
1193     }
1194 
    if (!result->physical_camera_results.empty()) {
      for (auto& it : result->physical_camera_results) {
        auto physical_settings = settings->find(it.first);
        if (physical_settings == settings->end()) {
          ALOGE("%s: Physical settings for camera id: %u are absent!",
                __FUNCTION__, it.first);
          continue;
        }
        uint8_t raw_binned_factor_used = false;
        if (sensor_binning_factor_info_.find(it.first) !=
            sensor_binning_factor_info_.end()) {
          auto& info = sensor_binning_factor_info_[it.first];
          // Physical stream was included in the request.
          if (!reprocess_request && info.quad_bayer_sensor &&
              info.max_res_request && info.has_raw_stream &&
              !info.has_non_raw_stream) {
            raw_binned_factor_used = true;
          }
          it.second->Set(ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
                         &raw_binned_factor_used, 1);
        }
        // Sensor timestamp for all physical devices must be the same.
        it.second->Set(ANDROID_SENSOR_TIMESTAMP, &next_capture_time_, 1);
        if (physical_settings->second.report_neutral_color_point) {
          it.second->Set(ANDROID_SENSOR_NEUTRAL_COLOR_POINT, kNeutralColorPoint,
                         ARRAY_SIZE(kNeutralColorPoint));
        }
        if (physical_settings->second.report_green_split) {
          it.second->Set(ANDROID_SENSOR_GREEN_SPLIT, &kGreenSplit, 1);
        }
        if (physical_settings->second.report_noise_profile) {
          auto device_chars = chars_->find(it.first);
          if (device_chars == chars_->end()) {
            ALOGE("%s: Sensor characteristics absent for device: %u", __func__,
                  it.first);
            continue;
          }
          CalculateAndAppendNoiseProfile(
              physical_settings->second.gain,
              GetBaseGainFactor(device_chars->second.max_raw_value),
              it.second.get());
        }
      }
    }

    callback.process_pipeline_result(std::move(result));
  }
}

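// Appends ANDROID_SENSOR_NOISE_PROFILE to the result metadata: one (S, O)
// pair per CFA channel for the camera metadata noise model, where S is the
// signal-dependent (shot noise) coefficient and O is the signal-independent
// (read noise) variance. The emulated sensor reports the same pair for all
// four channels.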
void EmulatedSensor::CalculateAndAppendNoiseProfile(
    float gain /*in ISO*/, float base_gain_factor,
    HalCameraMetadata* result /*out*/) {
  if (result != nullptr) {
    float total_gain = gain / 100.0 * base_gain_factor;
    float noise_var_gain = total_gain * total_gain;
    float read_noise_var =
        kReadNoiseVarBeforeGain * noise_var_gain + kReadNoiseVarAfterGain;
    // Noise profile is the same across all 4 CFA channels
    double noise_profile[2 * 4] = {
        noise_var_gain, read_noise_var, noise_var_gain, read_noise_var,
        noise_var_gain, read_noise_var, noise_var_gain, read_noise_var};
    result->Set(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
                ARRAY_SIZE(noise_profile));
  }
}

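// Quad Bayer CFA: each color of the standard RGGB 2x2 pattern is repeated in
// a 2x2 sub-block, so the filter tiles in 4x4 cells:
//   R  R  Gr Gr
//   R  R  Gr Gr
//   Gb Gb B  B
//   Gb Gb B  B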
EmulatedScene::ColorChannels EmulatedSensor::GetQuadBayerColor(uint32_t x,
                                                               uint32_t y) {
  // Row within larger set of quad bayer filter
  uint32_t row_mod = y % 4;
  // Column within larger set of quad bayer filter
  uint32_t col_mod = x % 4;

  // Row is within the top quadrants of a quad bayer cell
  if (row_mod < 2) {
    if (col_mod < 2) {
      return EmulatedScene::ColorChannels::R;
    }
    return EmulatedScene::ColorChannels::Gr;
  } else {
    if (col_mod < 2) {
      return EmulatedScene::ColorChannels::Gb;
    }
    return EmulatedScene::ColorChannels::B;
  }
}

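// Rearranges one 4x4 quad Bayer block into the standard RGGB mosaic. The
// block is first copied out row by row, then written back using
// quad_block_copy_idx_map, which selects the source pixel of the matching
// color for every destination position in the block.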
void EmulatedSensor::RemosaicQuadBayerBlock(uint16_t* img_in, uint16_t* img_out,
                                            int xstart, int ystart,
                                            int row_stride_in_bytes) {
  uint32_t quad_block_copy_idx_map[16] = {0, 2, 1, 3, 8,  10, 6,  11,
                                          4, 9, 5, 7, 12, 14, 13, 15};
  uint16_t quad_block_copy[16];
  uint32_t i = 0;
  for (uint32_t row = 0; row < 4; row++) {
    uint16_t* quad_bayer_row =
        img_in + (ystart + row) * (row_stride_in_bytes / 2) + xstart;
    for (uint32_t j = 0; j < 4; j++, i++) {
      quad_block_copy[i] = quad_bayer_row[j];
    }
  }

  for (uint32_t row = 0; row < 4; row++) {
    uint16_t* regular_bayer_row =
        img_out + (ystart + row) * (row_stride_in_bytes / 2) + xstart;
    for (uint32_t j = 0; j < 4; j++) {
      uint32_t idx = quad_block_copy_idx_map[row + 4 * j];
      regular_bayer_row[j] = quad_block_copy[idx];
    }
  }
}

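// Remosaics a full-resolution RAW16 quad Bayer frame into a standard Bayer
// frame by rewriting it one 4x4 tile at a time.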
status_t EmulatedSensor::RemosaicRAW16Image(uint16_t* img_in, uint16_t* img_out,
                                            size_t row_stride_in_bytes,
                                            const SensorCharacteristics& chars) {
  if (chars.full_res_width % 4 != 0 || chars.full_res_height % 4 != 0) {
    ALOGE(
        "%s RAW16 Image with quad CFA, height %zu and width %zu, not multiples "
        "of 4",
        __FUNCTION__, chars.full_res_height, chars.full_res_width);
    return BAD_VALUE;
  }
  for (uint32_t i = 0; i < chars.full_res_width; i += 4) {
    for (uint32_t j = 0; j < chars.full_res_height; j += 4) {
      RemosaicQuadBayerBlock(img_in, img_out, i, j, row_stride_in_bytes);
    }
  }
  return OK;
}

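// Renders a binned (non-maximum-resolution) RAW16 frame. Output pixels are
// produced at the binned width/height; each one samples the emulated scene at
// the corresponding full-resolution coordinate, so the result approximates a
// binned readout of the quad Bayer sensor.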
void EmulatedSensor::CaptureRawBinned(uint8_t* img, size_t row_stride_in_bytes,
                                      uint32_t gain,
                                      const SensorCharacteristics& chars) {
  ATRACE_CALL();
  float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
  float noise_var_gain = total_gain * total_gain;
  float read_noise_var =
      kReadNoiseVarBeforeGain * noise_var_gain + kReadNoiseVarAfterGain;
  int bayer_select[4] = {EmulatedScene::R, EmulatedScene::Gr, EmulatedScene::Gb,
                         EmulatedScene::B};
  scene_->SetReadoutPixel(0, 0);
  for (unsigned int out_y = 0; out_y < chars.height; out_y++) {
    // Stride still stays width since the buffer is binned size.
    int* bayer_row = bayer_select + (out_y & 0x1) * 2;
    uint16_t* px = (uint16_t*)img + out_y * (row_stride_in_bytes / 2);
    for (unsigned int out_x = 0; out_x < chars.width; out_x++) {
      int color_idx = bayer_row[out_x & 0x1];
      uint16_t raw_count = 0;
      // Color filter will be the same for each quad.
      uint32_t electron_count = 0;
      int x, y;
      float norm_x = (float)out_x / chars.width;
      float norm_y = (float)out_y / chars.height;
      x = static_cast<int>(chars.full_res_width * norm_x);
      y = static_cast<int>(chars.full_res_height * norm_y);

      x = std::min(std::max(x, 0), (int)chars.full_res_width - 1);
      y = std::min(std::max(y, 0), (int)chars.full_res_height - 1);

      scene_->SetReadoutPixel(x, y);

      const uint32_t* pixel = scene_->GetPixelElectrons();
      electron_count = pixel[color_idx];
      // TODO: Better pixel saturation curve?
      electron_count = (electron_count < kSaturationElectrons)
                           ? electron_count
                           : kSaturationElectrons;

      // TODO: Better A/D saturation curve?
      raw_count = electron_count * total_gain;
      raw_count =
          (raw_count < chars.max_raw_value) ? raw_count : chars.max_raw_value;

      // Calculate noise value
      // TODO: Use more-correct Gaussian instead of uniform noise
      float photon_noise_var = electron_count * noise_var_gain;
      float noise_stddev = sqrtf_approx(read_noise_var + photon_noise_var);
      // Scaled to roughly match gaussian/uniform noise stddev
      float noise_sample = rand_r(&rand_seed_) * (2.5 / (1.0 + RAND_MAX)) - 1.25;

      raw_count += chars.black_level_pattern[color_idx];
      raw_count += noise_stddev * noise_sample;
      *px++ = raw_count;
    }
  }
  ALOGVV("Binned RAW sensor image captured");
}

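// Renders a full-resolution RAW16 frame. Each output value is roughly
// clamp(electrons * gain) + black level + noise, where the noise term combines
// read noise and photon shot noise and is drawn from a scaled uniform
// distribution as an approximation of a Gaussian.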
void EmulatedSensor::CaptureRawFullRes(uint8_t* img, size_t row_stride_in_bytes,
                                       uint32_t gain,
                                       const SensorCharacteristics& chars) {
  ATRACE_CALL();
  float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
  float noise_var_gain = total_gain * total_gain;
  float read_noise_var =
      kReadNoiseVarBeforeGain * noise_var_gain + kReadNoiseVarAfterGain;

  scene_->SetReadoutPixel(0, 0);
  // RGGB
  int bayer_select[4] = {EmulatedScene::R, EmulatedScene::Gr, EmulatedScene::Gb,
                         EmulatedScene::B};

  for (unsigned int y = 0; y < chars.full_res_height; y++) {
    int* bayer_row = bayer_select + (y & 0x1) * 2;
    uint16_t* px = (uint16_t*)img + y * (row_stride_in_bytes / 2);
    for (unsigned int x = 0; x < chars.full_res_width; x++) {
      int color_idx = chars.quad_bayer_sensor ? GetQuadBayerColor(x, y)
                                              : bayer_row[x & 0x1];
      uint32_t electron_count;
      electron_count = scene_->GetPixelElectrons()[color_idx];

      // TODO: Better pixel saturation curve?
      electron_count = (electron_count < kSaturationElectrons)
                           ? electron_count
                           : kSaturationElectrons;

      // TODO: Better A/D saturation curve?
      uint16_t raw_count = electron_count * total_gain;
      raw_count =
          (raw_count < chars.max_raw_value) ? raw_count : chars.max_raw_value;

      // Calculate noise value
      // TODO: Use more-correct Gaussian instead of uniform noise
      float photon_noise_var = electron_count * noise_var_gain;
      float noise_stddev = sqrtf_approx(read_noise_var + photon_noise_var);
      // Scaled to roughly match gaussian/uniform noise stddev
      float noise_sample = rand_r(&rand_seed_) * (2.5 / (1.0 + RAND_MAX)) - 1.25;

      raw_count += chars.black_level_pattern[color_idx];
      raw_count += noise_stddev * noise_sample;

      *px++ = raw_count;
    }
    // TODO: Handle this better
    // simulatedTime += mRowReadoutTime;
  }
  ALOGVV("Raw sensor image captured");
}

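// Renders an 8-bit RGB/RGBA/ARGB frame by sampling the scene with perfect
// demosaicing. scale64x folds the electron-to-8bpp conversion into 6-bit
// fixed point: 64 * gain * 255 / max_raw_value, so full scale is 255 * 64.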
void EmulatedSensor::CaptureRGB(uint8_t* img, uint32_t width, uint32_t height,
                                uint32_t stride, RGBLayout layout, uint32_t gain,
                                const SensorCharacteristics& chars) {
  ATRACE_CALL();
  float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
  // In fixed-point math, calculate total scaling from electrons to 8bpp
  int scale64x = 64 * total_gain * 255 / chars.max_raw_value;
  uint32_t inc_h = ceil((float)chars.full_res_width / width);
  uint32_t inc_v = ceil((float)chars.full_res_height / height);

  for (unsigned int y = 0, outy = 0; y < chars.full_res_height;
       y += inc_v, outy++) {
    scene_->SetReadoutPixel(0, y);
    uint8_t* px = img + outy * stride;
    for (unsigned int x = 0; x < chars.full_res_width; x += inc_h) {
      uint32_t r_count, g_count, b_count;
      // TODO: Perfect demosaicing is a cheat
      const uint32_t* pixel = scene_->GetPixelElectrons();
      r_count = pixel[EmulatedScene::R] * scale64x;
      g_count = pixel[EmulatedScene::Gr] * scale64x;
      b_count = pixel[EmulatedScene::B] * scale64x;

      uint8_t r = r_count < 255 * 64 ? r_count / 64 : 255;
      uint8_t g = g_count < 255 * 64 ? g_count / 64 : 255;
      uint8_t b = b_count < 255 * 64 ? b_count / 64 : 255;
      switch (layout) {
        case RGB:
          *px++ = r;
          *px++ = g;
          *px++ = b;
          break;
        case RGBA:
          *px++ = r;
          *px++ = g;
          *px++ = b;
          *px++ = 255;
          break;
        case ARGB:
          *px++ = 255;
          *px++ = r;
          *px++ = g;
          *px++ = b;
          break;
        default:
          ALOGE("%s: RGB layout: %d not supported", __FUNCTION__, layout);
          return;
      }
      for (unsigned int j = 1; j < inc_h; j++) scene_->GetPixelElectrons();
    }
  }
  ALOGVV("RGB sensor image captured");
}

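// Renders a YUV420 frame (8 or 16 bits per sample) with optional zoom and
// 90-degree rotate-and-crop. The RGB->YCbCr conversion uses JFIF full-range
// coefficients in 6-bit fixed point: 0.299/0.587/0.114 * 64 ~= 19/37/7 for Y,
// and the 128 chroma offset is pre-scaled by 64 * 64 (128 * 4096 = 524288)
// because it is added after the fixed-point multiply.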
void EmulatedSensor::CaptureYUV420(YCbCrPlanes yuv_layout, uint32_t width,
                                   uint32_t height, uint32_t gain,
                                   float zoom_ratio, bool rotate,
                                   const SensorCharacteristics& chars) {
  ATRACE_CALL();
  float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
  // Using fixed-point math with 6 bits of fractional precision.
  // In fixed-point math, calculate total scaling from electrons to 8bpp
  const int scale64x =
      kFixedBitPrecision * total_gain * 255 / chars.max_raw_value;
  // Fixed-point coefficients for RGB-YUV transform
  // Based on JFIF RGB->YUV transform.
  // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
  const int rgb_to_y[] = {19, 37, 7};
  const int rgb_to_cb[] = {-10, -21, 32, 524288};
  const int rgb_to_cr[] = {32, -26, -5, 524288};
  // Scale back to 8bpp non-fixed-point
  const int scale_out = 64;
  const int scale_out_sq = scale_out * scale_out;  // after multiplies

  const float aspect_ratio = static_cast<float>(width) / height;

  // Precalculate normalized coordinates and dimensions
  const float norm_left_top = 0.5f - 0.5f / zoom_ratio;
  const float norm_rot_top = norm_left_top;
  const float norm_width = 1 / zoom_ratio;
  const float norm_rot_width = norm_width / aspect_ratio;
  const float norm_rot_height = norm_width;
  const float norm_rot_left =
      norm_left_top + (norm_width + norm_rot_width) * 0.5f;

  for (unsigned int out_y = 0; out_y < height; out_y++) {
    uint8_t* px_y = yuv_layout.img_y + out_y * yuv_layout.y_stride;
    uint8_t* px_cb = yuv_layout.img_cb + (out_y / 2) * yuv_layout.cbcr_stride;
    uint8_t* px_cr = yuv_layout.img_cr + (out_y / 2) * yuv_layout.cbcr_stride;

    for (unsigned int out_x = 0; out_x < width; out_x++) {
      int x, y;
      float norm_x = out_x / (width * zoom_ratio);
      float norm_y = out_y / (height * zoom_ratio);
      if (rotate) {
        x = static_cast<int>(chars.full_res_width *
                             (norm_rot_left - norm_y * norm_rot_width));
        y = static_cast<int>(chars.full_res_height *
                             (norm_rot_top + norm_x * norm_rot_height));
      } else {
        x = static_cast<int>(chars.full_res_width * (norm_left_top + norm_x));
        y = static_cast<int>(chars.full_res_height * (norm_left_top + norm_y));
      }
      x = std::min(std::max(x, 0), (int)chars.full_res_width - 1);
      y = std::min(std::max(y, 0), (int)chars.full_res_height - 1);
      scene_->SetReadoutPixel(x, y);

      int32_t r_count, g_count, b_count;
      // TODO: Perfect demosaicing is a cheat
      const uint32_t* pixel = rotate ? scene_->GetPixelElectronsColumn()
                                     : scene_->GetPixelElectrons();
      r_count = pixel[EmulatedScene::R] * scale64x;
      r_count = r_count < kSaturationPoint ? r_count : kSaturationPoint;
      g_count = pixel[EmulatedScene::Gr] * scale64x;
      g_count = g_count < kSaturationPoint ? g_count : kSaturationPoint;
      b_count = pixel[EmulatedScene::B] * scale64x;
      b_count = b_count < kSaturationPoint ? b_count : kSaturationPoint;

      // Gamma correction
      r_count = gamma_table_[r_count];
      g_count = gamma_table_[g_count];
      b_count = gamma_table_[b_count];

      uint8_t y8 = (rgb_to_y[0] * r_count + rgb_to_y[1] * g_count +
                    rgb_to_y[2] * b_count) /
                   scale_out_sq;
      if (yuv_layout.bytesPerPixel == 1) {
        *px_y = y8;
      } else if (yuv_layout.bytesPerPixel == 2) {
        *(reinterpret_cast<uint16_t*>(px_y)) = htole16(y8 << 8);
      } else {
        ALOGE("%s: Unsupported bytes per pixel value: %zu", __func__,
              yuv_layout.bytesPerPixel);
        return;
      }
      px_y += yuv_layout.bytesPerPixel;

      if (out_y % 2 == 0 && out_x % 2 == 0) {
        uint8_t cb8 = (rgb_to_cb[0] * r_count + rgb_to_cb[1] * g_count +
                       rgb_to_cb[2] * b_count + rgb_to_cb[3]) /
                      scale_out_sq;
        uint8_t cr8 = (rgb_to_cr[0] * r_count + rgb_to_cr[1] * g_count +
                       rgb_to_cr[2] * b_count + rgb_to_cr[3]) /
                      scale_out_sq;
        if (yuv_layout.bytesPerPixel == 1) {
          *px_cb = cb8;
          *px_cr = cr8;
        } else if (yuv_layout.bytesPerPixel == 2) {
          *(reinterpret_cast<uint16_t*>(px_cb)) = htole16(cb8 << 8);
          *(reinterpret_cast<uint16_t*>(px_cr)) = htole16(cr8 << 8);
        } else {
          ALOGE("%s: Unsupported bytes per pixel value: %zu", __func__,
                yuv_layout.bytesPerPixel);
          return;
        }
        px_cr += yuv_layout.cbcr_step;
        px_cb += yuv_layout.cbcr_step;
      }
    }
  }
  ALOGVV("YUV420 sensor image captured");
}

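// Renders a 16-bit depth frame. The emulated scene has no real depth
// information, so the green channel is reused as a stand-in; values are
// scaled into 13-bit millimeters (8191 full scale) using the same 6-bit
// fixed-point scheme as the RGB path.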
void EmulatedSensor::CaptureDepth(uint8_t* img, uint32_t gain, uint32_t width,
                                  uint32_t height, uint32_t stride,
                                  const SensorCharacteristics& chars) {
  ATRACE_CALL();
  float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
  // In fixed-point math, calculate scaling factor to 13bpp millimeters
  int scale64x = 64 * total_gain * 8191 / chars.max_raw_value;
  uint32_t inc_h = ceil((float)chars.full_res_width / width);
  uint32_t inc_v = ceil((float)chars.full_res_height / height);

  for (unsigned int y = 0, out_y = 0; y < chars.full_res_height;
       y += inc_v, out_y++) {
    scene_->SetReadoutPixel(0, y);
    uint16_t* px = (uint16_t*)(img + (out_y * stride));
    for (unsigned int x = 0; x < chars.full_res_width; x += inc_h) {
      uint32_t depth_count;
      // TODO: Make up real depth scene instead of using green channel
      // as depth
      const uint32_t* pixel = scene_->GetPixelElectrons();
      depth_count = pixel[EmulatedScene::Gr] * scale64x;

      *px++ = depth_count < 8191 * 64 ? depth_count / 64 : 0;
      for (unsigned int j = 1; j < inc_h; j++) scene_->GetPixelElectrons();
    }
    // TODO: Handle this better
    // simulatedTime += mRowReadoutTime;
  }
  ALOGVV("Depth sensor image captured");
}

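// Produces the requested YUV420 output in one of three ways:
//  - HIGH_QUALITY: render the scene directly at the output resolution.
//  - REPROCESS:    scale the provided input frame to the output resolution.
//  - REGULAR:      render a small frame with the output aspect ratio, then
//                  upscale with libyuv.
// libyuv's I420Scale only handles planar I420, so interleaved (semi-planar)
// U/V planes are split before scaling and merged back afterwards.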
status_t EmulatedSensor::ProcessYUV420(const YUV420Frame& input,
                                       const YUV420Frame& output, uint32_t gain,
                                       ProcessType process_type, float zoom_ratio,
                                       bool rotate_and_crop,
                                       const SensorCharacteristics& chars) {
  ATRACE_CALL();
  size_t input_width, input_height;
  YCbCrPlanes input_planes, output_planes;
  std::vector<uint8_t> temp_yuv, temp_output_uv, temp_input_uv;

  // Downgrade HIGH_QUALITY to REGULAR on the emulator if the property
  // ro.boot.qemu.camera_hq_edge_processing is false.
  if (process_type == HIGH_QUALITY &&
      !property_get_bool("ro.boot.qemu.camera_hq_edge_processing", true)) {
    process_type = REGULAR;
  }

  switch (process_type) {
    case HIGH_QUALITY:
      CaptureYUV420(output.planes, output.width, output.height, gain, zoom_ratio,
                    rotate_and_crop, chars);
      return OK;
    case REPROCESS:
      input_width = input.width;
      input_height = input.height;
      input_planes = input.planes;

      // libyuv only supports planar YUV420 during scaling.
      // Split the input U/V plane in separate planes if needed.
      if (input_planes.cbcr_step == 2) {
        temp_input_uv.resize(input_width * input_height / 2);
        auto temp_uv_buffer = temp_input_uv.data();
        input_planes.img_cb = temp_uv_buffer;
        input_planes.img_cr = temp_uv_buffer + (input_width * input_height) / 4;
        input_planes.cbcr_stride = input_width / 2;
        if (input.planes.img_cb < input.planes.img_cr) {
          libyuv::SplitUVPlane(input.planes.img_cb, input.planes.cbcr_stride,
                               input_planes.img_cb, input_planes.cbcr_stride,
                               input_planes.img_cr, input_planes.cbcr_stride,
                               input_width / 2, input_height / 2);
        } else {
          libyuv::SplitUVPlane(input.planes.img_cr, input.planes.cbcr_stride,
                               input_planes.img_cr, input_planes.cbcr_stride,
                               input_planes.img_cb, input_planes.cbcr_stride,
                               input_width / 2, input_height / 2);
        }
      }
      break;
    case REGULAR:
    default:
      // Generate the smallest possible frame with the expected AR and
      // then scale using libyuv.
      float aspect_ratio = static_cast<float>(output.width) / output.height;
      zoom_ratio = std::max(1.f, zoom_ratio);
      input_width = EmulatedScene::kSceneWidth * aspect_ratio;
      input_height = EmulatedScene::kSceneHeight;
      temp_yuv.resize((input_width * input_height * 3) / 2);
      auto temp_yuv_buffer = temp_yuv.data();
      input_planes = {
          .img_y = temp_yuv_buffer,
          .img_cb = temp_yuv_buffer + input_width * input_height,
          .img_cr = temp_yuv_buffer + (input_width * input_height * 5) / 4,
          .y_stride = static_cast<uint32_t>(input_width),
          .cbcr_stride = static_cast<uint32_t>(input_width) / 2,
          .cbcr_step = 1};
      CaptureYUV420(input_planes, input_width, input_height, gain, zoom_ratio,
                    rotate_and_crop, chars);
  }

  output_planes = output.planes;
  // libyuv only supports planar YUV420 during scaling.
  // Treat the output UV space as planar first and then
  // interleave in the second step.
  if (output_planes.cbcr_step == 2) {
    temp_output_uv.resize(output.width * output.height / 2);
    auto temp_uv_buffer = temp_output_uv.data();
    output_planes.img_cb = temp_uv_buffer;
    output_planes.img_cr = temp_uv_buffer + output.width * output.height / 4;
    output_planes.cbcr_stride = output.width / 2;
  }

  auto ret = libyuv::I420Scale(
      input_planes.img_y, input_planes.y_stride, input_planes.img_cb,
      input_planes.cbcr_stride, input_planes.img_cr, input_planes.cbcr_stride,
      input_width, input_height, output_planes.img_y, output_planes.y_stride,
      output_planes.img_cb, output_planes.cbcr_stride, output_planes.img_cr,
      output_planes.cbcr_stride, output.width, output.height,
      libyuv::kFilterNone);
  if (ret != 0) {
    ALOGE("%s: Failed during YUV scaling: %d", __FUNCTION__, ret);
    return ret;
  }

  // Merge U/V Planes for the interleaved case
  if (output_planes.cbcr_step == 2) {
    if (output.planes.img_cb < output.planes.img_cr) {
      libyuv::MergeUVPlane(output_planes.img_cb, output_planes.cbcr_stride,
                           output_planes.img_cr, output_planes.cbcr_stride,
                           output.planes.img_cb, output.planes.cbcr_stride,
                           output.width / 2, output.height / 2);
    } else {
      libyuv::MergeUVPlane(output_planes.img_cr, output_planes.cbcr_stride,
                           output_planes.img_cb, output_planes.cbcr_stride,
                           output.planes.img_cr, output.planes.cbcr_stride,
                           output.width / 2, output.height / 2);
    }
  }

  return ret;
}

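// Applies the sRGB opto-electronic transfer function to a linear value in
// [0, saturation]: linear below the 0.0031308 threshold, otherwise
// 1.055 * x^(1/2.4) - 0.055.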
int32_t EmulatedSensor::ApplysRGBGamma(int32_t value, int32_t saturation) {
  float n_value = (static_cast<float>(value) / saturation);
  n_value = (n_value <= 0.0031308f)
                ? n_value * 12.92f
                : 1.055f * pow(n_value, 0.4166667f) - 0.055f;
  return n_value * saturation;
}

}  // namespace android