// NOTE(review): OpenGrok navigation chrome ("Home / Line# / Scopes# / Navigate /
// Raw / Download") removed from this scraped copy of the header.
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16 
/**
 * This class is a simple simulation of a typical CMOS cellphone imager chip,
 * which outputs 12-bit Bayer-mosaic raw images.
 *
 * Unlike most real image sensors, this one's native color space is linear sRGB.
 *
 * The sensor is abstracted as operating as a pipeline 3 stages deep;
 * conceptually, each frame to be captured goes through these three stages. The
 * processing step for the sensor is marked off by vertical sync signals, which
 * indicate the start of readout of the oldest frame. The interval between
 * processing steps depends on the frame duration of the frame currently being
 * captured. The stages are 1) configure, 2) capture, and 3) readout. During
 * configuration, the sensor's registers for settings such as exposure time,
 * frame duration, and gain are set for the next frame to be captured. In stage
 * 2, the image data for the frame is actually captured by the sensor. Finally,
 * in stage 3, the just-captured data is read out and sent to the rest of the
 * system.
 *
 * The sensor is assumed to be rolling-shutter, so low-numbered rows of the
 * sensor are exposed earlier in time than larger-numbered rows, with the time
 * offset between each row being equal to the row readout time.
 *
 * The characteristics of this sensor don't correspond to any actual sensor,
 * but are not far off typical sensors.
 *
 * Example timing diagram, with three frames:
 *  Frame 0-1: Frame duration 50 ms, exposure time 20 ms.
 *  Frame   2: Frame duration 75 ms, exposure time 65 ms.
 * Legend:
 *   C = update sensor registers for frame
 *   v = row in reset (vertical blanking interval)
 *   E = row capturing image data
 *   R = row being read out
 *   | = vertical sync signal
 *time(ms)|   0          55        105       155            230     270
 * Frame 0|   :configure : capture : readout :              :       :
 *  Row # | ..|CCCC______|_________|_________|              :       :
 *      0 |   :\          \vvvvvEEEER         \             :       :
 *    500 |   : \          \vvvvvEEEER         \            :       :
 *   1000 |   :  \          \vvvvvEEEER         \           :       :
 *   1500 |   :   \          \vvvvvEEEER         \          :       :
 *   2000 |   :    \__________\vvvvvEEEER_________\         :       :
 * Frame 1|   :           configure  capture      readout   :       :
 *  Row # |   :          |CCCC_____|_________|______________|       :
 *      0 |   :          :\         \vvvvvEEEER              \      :
 *    500 |   :          : \         \vvvvvEEEER              \     :
 *   1000 |   :          :  \         \vvvvvEEEER              \    :
 *   1500 |   :          :   \         \vvvvvEEEER              \   :
 *   2000 |   :          :    \_________\vvvvvEEEER______________\  :
 * Frame 2|   :          :          configure     capture    readout:
 *  Row # |   :          :         |CCCC_____|______________|_______|...
 *      0 |   :          :         :\         \vEEEEEEEEEEEEER       \
 *    500 |   :          :         : \         \vEEEEEEEEEEEEER       \
 *   1000 |   :          :         :  \         \vEEEEEEEEEEEEER       \
 *   1500 |   :          :         :   \         \vEEEEEEEEEEEEER       \
 *   2000 |   :          :         :    \_________\vEEEEEEEEEEEEER_______\
 */
74 
75 #ifndef HW_EMULATOR_CAMERA2_SENSOR_H
76 #define HW_EMULATOR_CAMERA2_SENSOR_H
77 
#include <android/hardware/graphics/common/1.2/types.h>
#include <hwl_types.h>

#include <algorithm>
#include <functional>
#include <map>
#include <memory>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include "Base.h"
#include "EmulatedScene.h"
#include "JpegCompressor.h"
#include "utils/Mutex.h"
#include "utils/StreamConfigurationMap.h"
#include "utils/Thread.h"
#include "utils/Timers.h"
91 
92 namespace android {
93 
94 using google_camera_hal::ColorSpaceProfile;
95 using google_camera_hal::DynamicRangeProfile;
96 using google_camera_hal::HwlPipelineCallback;
97 using google_camera_hal::HwlPipelineResult;
98 using google_camera_hal::StreamConfiguration;
99 
100 using hardware::graphics::common::V1_2::Dataspace;
101 
102 /*
103  * Default to sRGB with D65 white point
104  */
105 struct ColorFilterXYZ {
106   float rX = 3.2406f;
107   float rY = -1.5372f;
108   float rZ = -0.4986f;
109   float grX = -0.9689f;
110   float grY = 1.8758f;
111   float grZ = 0.0415f;
112   float gbX = -0.9689f;
113   float gbY = 1.8758f;
114   float gbZ = 0.0415f;
115   float bX = 0.0557f;
116   float bY = -0.2040f;
117   float bZ = 1.0570f;
118 };
119 
120 struct ForwardMatrix {
121   float rX = 0.4355f;
122   float gX = 0.3848f;
123   float bX = 0.1425f;
124   float rY = 0.2216f;
125   float gY = 0.7168f;
126   float bY = 0.0605f;
127   float rZ = 0.0137f;
128   float gZ = 0.0967f;
129   float bZ = 0.7139f;
130 };
131 
/*
 * Generic 3x3 RGB -> RGB conversion matrix, stored row-by-row
 * (R row, G row, B row), used for color-space conversions of captured data.
 *
 * Fix: unlike the sibling structs above, the members had no initializers, so
 * a default-constructed instance carried indeterminate values. Zero-initialize
 * every coefficient; aggregate initialization by callers still works.
 */
struct RgbRgbMatrix {
  // R' = rR*R + gR*G + bR*B
  float rR = 0.0f;
  float gR = 0.0f;
  float bR = 0.0f;
  // G' = rG*R + gG*G + bG*B
  float rG = 0.0f;
  float gG = 0.0f;
  float bG = 0.0f;
  // B' = rB*R + gB*G + bB*B
  float rB = 0.0f;
  float gB = 0.0f;
  float bB = 0.0f;
};
143 
144 typedef std::unordered_map<DynamicRangeProfile,
145                            std::unordered_set<DynamicRangeProfile>>
146     DynamicRangeProfileMap;
147 
148 typedef std::unordered_map<
149     ColorSpaceProfile,
150     std::unordered_map<int, std::unordered_set<DynamicRangeProfile>>>
151     ColorSpaceProfileMap;
152 
153 struct SensorCharacteristics {
154   size_t width = 0;
155   size_t height = 0;
156   size_t full_res_width = 0;
157   size_t full_res_height = 0;
158   nsecs_t exposure_time_range[2] = {0};
159   nsecs_t frame_duration_range[2] = {0};
160   int32_t sensitivity_range[2] = {0};
161   camera_metadata_enum_android_sensor_info_color_filter_arrangement
162       color_arangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
163   ColorFilterXYZ color_filter;
164   ForwardMatrix forward_matrix;
165   uint32_t max_raw_value = 0;
166   uint32_t black_level_pattern[4] = {0};
167   uint32_t max_raw_streams = 0;
168   uint32_t max_processed_streams = 0;
169   uint32_t max_stalling_streams = 0;
170   uint32_t max_input_streams = 0;
171   uint32_t physical_size[2] = {0};
172   bool is_flash_supported = false;
173   uint32_t lens_shading_map_size[2] = {0};
174   uint32_t max_pipeline_depth = 0;
175   uint32_t orientation = 0;
176   bool is_front_facing = false;
177   bool quad_bayer_sensor = false;
178   bool is_10bit_dynamic_range_capable = false;
179   DynamicRangeProfileMap dynamic_range_profiles;
180   bool support_stream_use_case = false;
181   int64_t end_valid_stream_use_case =
182       ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL;
183   bool support_color_space_profiles = false;
184   ColorSpaceProfileMap color_space_profiles;
185   int32_t raw_crop_region_zoomed[4] = {0};
186   int32_t raw_crop_region_unzoomed[4] = {0};
187 };
188 
189 // Maps logical/physical camera ids to sensor characteristics
190 typedef std::unordered_map<uint32_t, SensorCharacteristics> LogicalCharacteristics;
191 
192 class EmulatedSensor : private Thread, public virtual RefBase {
193  public:
194   EmulatedSensor();
195   ~EmulatedSensor();
196 
OverrideFormat(android_pixel_format_t format,DynamicRangeProfile dynamic_range_profile)197   static android_pixel_format_t OverrideFormat(
198       android_pixel_format_t format, DynamicRangeProfile dynamic_range_profile) {
199     switch (dynamic_range_profile) {
200       case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD:
201         if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
202           return HAL_PIXEL_FORMAT_YCBCR_420_888;
203         }
204         break;
205       case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
206         if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
207           return static_cast<android_pixel_format_t>(
208               HAL_PIXEL_FORMAT_YCBCR_P010);
209         }
210         break;
211       default:
212         ALOGE("%s: Unsupported dynamic range profile 0x%x", __FUNCTION__,
213               dynamic_range_profile);
214     }
215 
216     return format;
217   }
218 
IsReprocessPathSupported(android_pixel_format_t input_format,android_pixel_format_t output_format)219   static bool IsReprocessPathSupported(android_pixel_format_t input_format,
220                                        android_pixel_format_t output_format) {
221     if ((HAL_PIXEL_FORMAT_YCBCR_420_888 == input_format) &&
222         ((HAL_PIXEL_FORMAT_YCBCR_420_888 == output_format) ||
223          (HAL_PIXEL_FORMAT_BLOB == output_format))) {
224       return true;
225     }
226 
227     if (HAL_PIXEL_FORMAT_RAW16 == input_format &&
228         HAL_PIXEL_FORMAT_RAW16 == output_format) {
229       return true;
230     }
231 
232     return false;
233   }
234 
235   static bool AreCharacteristicsSupported(
236       const SensorCharacteristics& characteristics);
237 
238   static bool IsStreamCombinationSupported(
239       uint32_t logical_id, const StreamConfiguration& config,
240       StreamConfigurationMap& map, StreamConfigurationMap& max_resolution_map,
241       const PhysicalStreamConfigurationMap& physical_map,
242       const PhysicalStreamConfigurationMap& physical_map_max_resolution,
243       const LogicalCharacteristics& sensor_chars);
244 
245   static bool IsStreamCombinationSupported(
246       uint32_t logical_id, const StreamConfiguration& config,
247       StreamConfigurationMap& map,
248       const PhysicalStreamConfigurationMap& physical_map,
249       const LogicalCharacteristics& sensor_chars, bool is_max_res = false);
250 
251   /*
252    * Power control
253    */
254 
255   status_t StartUp(uint32_t logical_camera_id,
256                    std::unique_ptr<LogicalCharacteristics> logical_chars);
257   status_t ShutDown();
258 
259   /*
260    * Physical camera settings control
261    */
262   struct SensorSettings {
263     nsecs_t exposure_time = 0;
264     nsecs_t frame_duration = 0;
265     uint32_t gain = 0;  // ISO
266     uint32_t lens_shading_map_mode;
267     bool report_neutral_color_point = false;
268     bool report_green_split = false;
269     bool report_noise_profile = false;
270     float zoom_ratio = 1.0f;
271     bool report_rotate_and_crop = false;
272     uint8_t rotate_and_crop = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
273     bool report_video_stab = false;
274     uint8_t video_stab = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
275     bool report_edge_mode = false;
276     uint8_t edge_mode = ANDROID_EDGE_MODE_OFF;
277     uint8_t sensor_pixel_mode = ANDROID_SENSOR_PIXEL_MODE_DEFAULT;
278     uint8_t test_pattern_mode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
279     uint32_t test_pattern_data[4] = {0, 0, 0, 0};
280     uint32_t screen_rotation = 0;
281   };
282 
283   // Maps physical and logical camera ids to individual device settings
284   typedef std::unordered_map<uint32_t, SensorSettings> LogicalCameraSettings;
285 
286   void SetCurrentRequest(std::unique_ptr<LogicalCameraSettings> logical_settings,
287                          std::unique_ptr<HwlPipelineResult> result,
288                          std::unique_ptr<Buffers> input_buffers,
289                          std::unique_ptr<Buffers> output_buffers);
290 
291   status_t Flush();
292 
293   /*
294    * Synchronizing with sensor operation (vertical sync)
295    */
296 
297   // Wait until the sensor outputs its next vertical sync signal, meaning it
298   // is starting readout of its latest frame of data. Returns true if vertical
299   // sync is signaled, false if the wait timed out.
300   bool WaitForVSync(nsecs_t rel_time);
301 
302   static const nsecs_t kSupportedExposureTimeRange[2];
303   static const nsecs_t kSupportedFrameDurationRange[2];
304   static const int32_t kSupportedSensitivityRange[2];
305   static const uint8_t kSupportedColorFilterArrangement;
306   static const uint32_t kDefaultMaxRawValue;
307   static const nsecs_t kDefaultExposureTime;
308   static const int32_t kDefaultSensitivity;
309   static const nsecs_t kDefaultFrameDuration;
310   static const nsecs_t kReturnResultThreshod;
311   static const uint32_t kDefaultBlackLevelPattern[4];
312   static const camera_metadata_rational kDefaultColorTransform[9];
313   static const float kDefaultColorCorrectionGains[4];
314   static const float kDefaultToneMapCurveRed[4];
315   static const float kDefaultToneMapCurveGreen[4];
316   static const float kDefaultToneMapCurveBlue[4];
317   static const uint8_t kPipelineDepth;
318 
319  private:
320   // Scene stabilization
321   static const uint32_t kRegularSceneHandshake;
322   static const uint32_t kReducedSceneHandshake;
323 
324   /**
325    * Logical characteristics
326    */
327   std::unique_ptr<LogicalCharacteristics> chars_;
328 
329   uint32_t logical_camera_id_ = 0;
330 
331   static const nsecs_t kMinVerticalBlank;
332 
333   // Sensor sensitivity, approximate
334 
335   static const float kSaturationVoltage;
336   static const uint32_t kSaturationElectrons;
337   static const float kVoltsPerLuxSecond;
338   static const float kElectronsPerLuxSecond;
339 
340   static const float kReadNoiseStddevBeforeGain;  // In electrons
341   static const float kReadNoiseStddevAfterGain;   // In raw digital units
342   static const float kReadNoiseVarBeforeGain;
343   static const float kReadNoiseVarAfterGain;
344   static const camera_metadata_rational kNeutralColorPoint[3];
345   static const float kGreenSplit;
346 
347   static const uint32_t kMaxRAWStreams;
348   static const uint32_t kMaxProcessedStreams;
349   static const uint32_t kMaxStallingStreams;
350   static const uint32_t kMaxInputStreams;
351   static const uint32_t kMaxLensShadingMapSize[2];
352   static const int32_t kFixedBitPrecision;
353   static const int32_t kSaturationPoint;
354 
355   std::vector<int32_t> gamma_table_sRGB_;
356   std::vector<int32_t> gamma_table_smpte170m_;
357   std::vector<int32_t> gamma_table_hlg_;
358 
359   Mutex control_mutex_;  // Lock before accessing control parameters
360   // Start of control parameters
361   Condition vsync_;
362   bool got_vsync_;
363   std::unique_ptr<LogicalCameraSettings> current_settings_;
364   std::unique_ptr<HwlPipelineResult> current_result_;
365   std::unique_ptr<Buffers> current_output_buffers_;
366   std::unique_ptr<Buffers> current_input_buffers_;
367   std::unique_ptr<JpegCompressor> jpeg_compressor_;
368 
369   // End of control parameters
370 
371   unsigned int rand_seed_ = 1;
372 
373   /**
374    * Inherited Thread virtual overrides, and members only used by the
375    * processing thread
376    */
377   bool threadLoop() override;
378 
379   nsecs_t next_capture_time_;
380   nsecs_t next_readout_time_;
381 
382   struct SensorBinningFactorInfo {
383     bool has_raw_stream = false;
384     bool has_non_raw_stream = false;
385     bool quad_bayer_sensor = false;
386     bool max_res_request = false;
387     bool has_cropped_raw_stream = false;
388     bool raw_in_sensor_zoom_applied = false;
389   };
390 
391   std::map<uint32_t, SensorBinningFactorInfo> sensor_binning_factor_info_;
392 
393   std::unique_ptr<EmulatedScene> scene_;
394 
395   RgbRgbMatrix rgb_rgb_matrix_;
396 
397   static EmulatedScene::ColorChannels GetQuadBayerColor(uint32_t x, uint32_t y);
398 
399   static void RemosaicQuadBayerBlock(uint16_t* img_in, uint16_t* img_out,
400                                      int xstart, int ystart,
401                                      int row_stride_in_bytes);
402 
403   static status_t RemosaicRAW16Image(uint16_t* img_in, uint16_t* img_out,
404                                      size_t row_stride_in_bytes,
405                                      const SensorCharacteristics& chars);
406 
407   void CaptureRawBinned(uint8_t* img, size_t row_stride_in_bytes, uint32_t gain,
408                         const SensorCharacteristics& chars);
409 
410   void CaptureRawFullRes(uint8_t* img, size_t row_stride_in_bytes,
411                          uint32_t gain, const SensorCharacteristics& chars);
412   void CaptureRawInSensorZoom(uint8_t* img, size_t row_stride_in_bytes,
413                               uint32_t gain, const SensorCharacteristics& chars);
414   void CaptureRaw(uint8_t* img, size_t row_stride_in_bytes, uint32_t gain,
415                   const SensorCharacteristics& chars, bool in_sensor_zoom,
416                   bool binned);
417 
418   enum RGBLayout { RGB, RGBA, ARGB };
419   void CaptureRGB(uint8_t* img, uint32_t width, uint32_t height,
420                   uint32_t stride, RGBLayout layout, uint32_t gain,
421                   int32_t color_space, const SensorCharacteristics& chars);
422   void CaptureYUV420(YCbCrPlanes yuv_layout, uint32_t width, uint32_t height,
423                      uint32_t gain, float zoom_ratio, bool rotate,
424                      int32_t color_space, const SensorCharacteristics& chars);
425   void CaptureDepth(uint8_t* img, uint32_t gain, uint32_t width, uint32_t height,
426                     uint32_t stride, const SensorCharacteristics& chars);
427   void RgbToRgb(uint32_t* r_count, uint32_t* g_count, uint32_t* b_count);
428   void CalculateRgbRgbMatrix(int32_t color_space,
429                              const SensorCharacteristics& chars);
430 
431   struct YUV420Frame {
432     uint32_t width = 0;
433     uint32_t height = 0;
434     YCbCrPlanes planes;
435   };
436 
437   enum ProcessType { REPROCESS, HIGH_QUALITY, REGULAR };
438   status_t ProcessYUV420(const YUV420Frame& input, const YUV420Frame& output,
439                          uint32_t gain, ProcessType process_type,
440                          float zoom_ratio, bool rotate_and_crop,
441                          int32_t color_space,
442                          const SensorCharacteristics& chars);
443 
444   inline int32_t ApplysRGBGamma(int32_t value, int32_t saturation);
445   inline int32_t ApplySMPTE170MGamma(int32_t value, int32_t saturation);
446   inline int32_t ApplyST2084Gamma(int32_t value, int32_t saturation);
447   inline int32_t ApplyHLGGamma(int32_t value, int32_t saturation);
448   inline int32_t GammaTable(int32_t value, int32_t color_space);
449 
450   bool WaitForVSyncLocked(nsecs_t reltime);
451   void CalculateAndAppendNoiseProfile(float gain /*in ISO*/,
452                                       float base_gain_factor,
453                                       HalCameraMetadata* result /*out*/);
454 
455   void ReturnResults(HwlPipelineCallback callback,
456                      std::unique_ptr<LogicalCameraSettings> settings,
457                      std::unique_ptr<HwlPipelineResult> result,
458                      bool reprocess_request);
459 
GetBaseGainFactor(float max_raw_value)460   static float GetBaseGainFactor(float max_raw_value) {
461     return max_raw_value / EmulatedSensor::kSaturationElectrons;
462   }
463 };
464 
465 }  // namespace android
466 
467 #endif  // HW_EMULATOR_CAMERA2_SENSOR_H
468