• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2012 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 /**
18  * This class is a simple simulation of a typical CMOS cellphone imager chip,
19  * which outputs 12-bit Bayer-mosaic raw images.
20  *
21  * Unlike most real image sensors, this one's native color space is linear sRGB.
22  *
23  * The sensor is abstracted as operating as a pipeline 3 stages deep;
24  * conceptually, each frame to be captured goes through these three stages. The
25  * processing step for the sensor is marked off by vertical sync signals, which
26  * indicate the start of readout of the oldest frame. The interval between
27  * processing steps depends on the frame duration of the frame currently being
28  * captured. The stages are 1) configure, 2) capture, and 3) readout. During
29  * configuration, the sensor's registers for settings such as exposure time,
30  * frame duration, and gain are set for the next frame to be captured. In stage
31  * 2, the image data for the frame is actually captured by the sensor. Finally,
32  * in stage 3, the just-captured data is read out and sent to the rest of the
33  * system.
34  *
35  * The sensor is assumed to be rolling-shutter, so low-numbered rows of the
36  * sensor are exposed earlier in time than larger-numbered rows, with the time
37  * offset between each row being equal to the row readout time.
38  *
39  * The characteristics of this sensor don't correspond to any actual sensor,
40  * but are not far off typical sensors.
41  *
42  * Example timing diagram, with three frames:
43  *  Frame 0-1: Frame duration 50 ms, exposure time 20 ms.
44  *  Frame   2: Frame duration 75 ms, exposure time 65 ms.
45  * Legend:
46  *   C = update sensor registers for frame
47  *   v = row in reset (vertical blanking interval)
48  *   E = row capturing image data
49  *   R = row being read out
50  *   | = vertical sync signal
51  *time(ms)|   0          55        105       155            230     270
52  * Frame 0|   :configure : capture : readout :              :       :
53  *  Row # | ..|CCCC______|_________|_________|              :       :
54  *      0 |   :\          \vvvvvEEEER         \             :       :
55  *    500 |   : \          \vvvvvEEEER         \            :       :
56  *   1000 |   :  \          \vvvvvEEEER         \           :       :
57  *   1500 |   :   \          \vvvvvEEEER         \          :       :
58  *   2000 |   :    \__________\vvvvvEEEER_________\         :       :
59  * Frame 1|   :           configure  capture      readout   :       :
60  *  Row # |   :          |CCCC_____|_________|______________|       :
61  *      0 |   :          :\         \vvvvvEEEER              \      :
62  *    500 |   :          : \         \vvvvvEEEER              \     :
63  *   1000 |   :          :  \         \vvvvvEEEER              \    :
64  *   1500 |   :          :   \         \vvvvvEEEER              \   :
65  *   2000 |   :          :    \_________\vvvvvEEEER______________\  :
66  * Frame 2|   :          :          configure     capture    readout:
67  *  Row # |   :          :         |CCCC_____|______________|_______|...
68  *      0 |   :          :         :\         \vEEEEEEEEEEEEER       \
69  *    500 |   :          :         : \         \vEEEEEEEEEEEEER       \
70  *   1000 |   :          :         :  \         \vEEEEEEEEEEEEER       \
71  *   1500 |   :          :         :   \         \vEEEEEEEEEEEEER       \
72  *   2000 |   :          :         :    \_________\vEEEEEEEEEEEEER_______\
73  */
74 
75 #ifndef HW_EMULATOR_CAMERA2_SENSOR_H
76 #define HW_EMULATOR_CAMERA2_SENSOR_H
77 
#include <android/hardware/graphics/common/1.2/types.h>
#include <hwl_types.h>

#include <algorithm>
#include <functional>
#include <map>
#include <memory>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include "Base.h"
#include "EmulatedScene.h"
#include "JpegCompressor.h"
#include "utils/Mutex.h"
#include "utils/StreamConfigurationMap.h"
#include "utils/Thread.h"
#include "utils/Timers.h"
91 
92 namespace android {
93 
94 using google_camera_hal::ColorSpaceProfile;
95 using google_camera_hal::DynamicRangeProfile;
96 using google_camera_hal::HwlPipelineCallback;
97 using google_camera_hal::HwlPipelineResult;
98 using google_camera_hal::StreamConfiguration;
99 
100 using hardware::graphics::common::V1_2::Dataspace;
101 
102 /*
103  * Default to sRGB with D65 white point
104  */
/*
 * Per-channel color filter transform, XYZ -> linear sRGB with D65 white
 * point. The defaults are the standard IEC 61966-2-1 sRGB inverse matrix;
 * one row per Bayer channel (R, Gr, Gb, B), with the two green rows
 * identical by default.
 */
struct ColorFilterXYZ {
  // Red output row
  float rX = 3.2406f;
  float rY = -1.5372f;
  float rZ = -0.4986f;
  // Green (red-row) output row
  float grX = -0.9689f;
  float grY = 1.8758f;
  float grZ = 0.0415f;
  // Green (blue-row) output row
  float gbX = -0.9689f;
  float gbY = 1.8758f;
  float gbZ = 0.0415f;
  // Blue output row
  float bX = 0.0557f;
  float bY = -0.2040f;
  float bZ = 1.0570f;
};
119 
/*
 * Forward color matrix, linear sRGB -> XYZ. Column-per-channel layout
 * (rX/gX/bX form the X row, etc.); defaults approximate the sRGB forward
 * matrix adapted for the reference illuminant.
 */
struct ForwardMatrix {
  // X row
  float rX = 0.4355f;
  float gX = 0.3848f;
  float bX = 0.1425f;
  // Y row
  float rY = 0.2216f;
  float gY = 0.7168f;
  float bY = 0.0605f;
  // Z row
  float rZ = 0.0137f;
  float gZ = 0.0967f;
  float bZ = 0.7139f;
};
131 
/*
 * Generic 3x3 RGB -> RGB conversion matrix (used for color space
 * conversions; see CalculateRgbRgbMatrix in EmulatedSensor).
 *
 * Members were previously uninitialized; zero-initialize them so a
 * default-constructed instance has a deterministic (identity-free, all-zero)
 * state instead of indeterminate values. Aggregate initialization by callers
 * is unaffected.
 */
struct RgbRgbMatrix {
  float rR = 0.0f;
  float gR = 0.0f;
  float bR = 0.0f;
  float rG = 0.0f;
  float gG = 0.0f;
  float bG = 0.0f;
  float rB = 0.0f;
  float gB = 0.0f;
  float bB = 0.0f;
};
143 
144 typedef std::unordered_map<DynamicRangeProfile,
145                            std::unordered_set<DynamicRangeProfile>>
146     DynamicRangeProfileMap;
147 
148 typedef std::unordered_map<
149     ColorSpaceProfile,
150     std::unordered_map<int, std::unordered_set<DynamicRangeProfile>>>
151     ColorSpaceProfileMap;
152 
153 struct SensorCharacteristics {
154   size_t width = 0;
155   size_t height = 0;
156   size_t full_res_width = 0;
157   size_t full_res_height = 0;
158   nsecs_t exposure_time_range[2] = {0};
159   nsecs_t frame_duration_range[2] = {0};
160   int32_t sensitivity_range[2] = {0};
161   camera_metadata_enum_android_sensor_info_color_filter_arrangement
162       color_arangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
163   ColorFilterXYZ color_filter;
164   ForwardMatrix forward_matrix;
165   uint32_t max_raw_value = 0;
166   uint32_t black_level_pattern[4] = {0};
167   uint32_t max_raw_streams = 0;
168   uint32_t max_processed_streams = 0;
169   uint32_t max_stalling_streams = 0;
170   uint32_t max_input_streams = 0;
171   uint32_t physical_size[2] = {0};
172   bool is_flash_supported = false;
173   uint32_t lens_shading_map_size[2] = {0};
174   uint32_t max_pipeline_depth = 0;
175   uint32_t orientation = 0;
176   bool is_front_facing = false;
177   bool quad_bayer_sensor = false;
178   bool is_10bit_dynamic_range_capable = false;
179   DynamicRangeProfileMap dynamic_range_profiles;
180   bool support_stream_use_case = false;
181   int64_t end_valid_stream_use_case =
182       ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL;
183   bool support_color_space_profiles = false;
184   ColorSpaceProfileMap color_space_profiles;
185   int32_t raw_crop_region_zoomed[4] = {0};
186   int32_t raw_crop_region_unzoomed[4] = {0};
187   int32_t timestamp_source = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
188 };
189 
190 // Maps logical/physical camera ids to sensor characteristics
191 typedef std::unordered_map<uint32_t, SensorCharacteristics> LogicalCharacteristics;
192 
193 class EmulatedSensor : private Thread, public virtual RefBase {
194  public:
195   EmulatedSensor();
196   ~EmulatedSensor();
197 
OverrideFormat(android_pixel_format_t format,DynamicRangeProfile dynamic_range_profile)198   static android_pixel_format_t OverrideFormat(
199       android_pixel_format_t format, DynamicRangeProfile dynamic_range_profile) {
200     switch (dynamic_range_profile) {
201       case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD:
202         if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
203           return HAL_PIXEL_FORMAT_YCBCR_420_888;
204         }
205         break;
206       case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
207         if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
208           return static_cast<android_pixel_format_t>(
209               HAL_PIXEL_FORMAT_YCBCR_P010);
210         }
211         break;
212       default:
213         ALOGE("%s: Unsupported dynamic range profile 0x%x", __FUNCTION__,
214               dynamic_range_profile);
215     }
216 
217     return format;
218   }
219 
IsReprocessPathSupported(android_pixel_format_t input_format,android_pixel_format_t output_format)220   static bool IsReprocessPathSupported(android_pixel_format_t input_format,
221                                        android_pixel_format_t output_format) {
222     if ((HAL_PIXEL_FORMAT_YCBCR_420_888 == input_format) &&
223         ((HAL_PIXEL_FORMAT_YCBCR_420_888 == output_format) ||
224          (HAL_PIXEL_FORMAT_BLOB == output_format))) {
225       return true;
226     }
227 
228     if (HAL_PIXEL_FORMAT_RAW16 == input_format &&
229         HAL_PIXEL_FORMAT_RAW16 == output_format) {
230       return true;
231     }
232 
233     return false;
234   }
235 
236   static bool AreCharacteristicsSupported(
237       const SensorCharacteristics& characteristics);
238 
239   static bool IsStreamCombinationSupported(
240       uint32_t logical_id, const StreamConfiguration& config,
241       StreamConfigurationMap& map, StreamConfigurationMap& max_resolution_map,
242       const PhysicalStreamConfigurationMap& physical_map,
243       const PhysicalStreamConfigurationMap& physical_map_max_resolution,
244       const LogicalCharacteristics& sensor_chars);
245 
246   static bool IsStreamCombinationSupported(
247       uint32_t logical_id, const StreamConfiguration& config,
248       StreamConfigurationMap& map,
249       const PhysicalStreamConfigurationMap& physical_map,
250       const LogicalCharacteristics& sensor_chars, bool is_max_res = false);
251 
252   /*
253    * Power control
254    */
255 
256   status_t StartUp(uint32_t logical_camera_id,
257                    std::unique_ptr<LogicalCharacteristics> logical_chars);
258   status_t ShutDown();
259 
260   /*
261    * Physical camera settings control
262    */
263   struct SensorSettings {
264     nsecs_t exposure_time = 0;
265     nsecs_t frame_duration = 0;
266     uint32_t gain = 0;  // ISO
267     uint32_t lens_shading_map_mode;
268     bool report_neutral_color_point = false;
269     bool report_green_split = false;
270     bool report_noise_profile = false;
271     float zoom_ratio = 1.0f;
272     bool report_rotate_and_crop = false;
273     uint8_t rotate_and_crop = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
274     bool report_video_stab = false;
275     uint8_t video_stab = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
276     bool report_edge_mode = false;
277     uint8_t edge_mode = ANDROID_EDGE_MODE_OFF;
278     uint8_t sensor_pixel_mode = ANDROID_SENSOR_PIXEL_MODE_DEFAULT;
279     uint8_t test_pattern_mode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
280     uint32_t test_pattern_data[4] = {0, 0, 0, 0};
281     uint32_t screen_rotation = 0;
282     uint32_t timestamp_source = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
283   };
284 
285   // Maps physical and logical camera ids to individual device settings
286   typedef std::unordered_map<uint32_t, SensorSettings> LogicalCameraSettings;
287 
288   void SetCurrentRequest(std::unique_ptr<LogicalCameraSettings> logical_settings,
289                          std::unique_ptr<HwlPipelineResult> result,
290                          std::unique_ptr<HwlPipelineResult> partial_result,
291                          std::unique_ptr<Buffers> input_buffers,
292                          std::unique_ptr<Buffers> output_buffers);
293 
294   status_t Flush();
295 
296   /*
297    * Synchronizing with sensor operation (vertical sync)
298    */
299 
300   // Wait until the sensor outputs its next vertical sync signal, meaning it
301   // is starting readout of its latest frame of data. Returns true if vertical
302   // sync is signaled, false if the wait timed out.
303   bool WaitForVSync(nsecs_t rel_time);
304 
305   static const nsecs_t kSupportedExposureTimeRange[2];
306   static const nsecs_t kSupportedFrameDurationRange[2];
307   static const int32_t kSupportedSensitivityRange[2];
308   static const uint8_t kSupportedColorFilterArrangement;
309   static const uint32_t kDefaultMaxRawValue;
310   static const nsecs_t kDefaultExposureTime;
311   static const int32_t kDefaultSensitivity;
312   static const nsecs_t kDefaultFrameDuration;
313   static const nsecs_t kReturnResultThreshod;
314   static const uint32_t kDefaultBlackLevelPattern[4];
315   static const camera_metadata_rational kDefaultColorTransform[9];
316   static const float kDefaultColorCorrectionGains[4];
317   static const float kDefaultToneMapCurveRed[4];
318   static const float kDefaultToneMapCurveGreen[4];
319   static const float kDefaultToneMapCurveBlue[4];
320   static const uint8_t kPipelineDepth;
321 
322  private:
323   // Scene stabilization
324   static const uint32_t kRegularSceneHandshake;
325   static const uint32_t kReducedSceneHandshake;
326 
327   /**
328    * Logical characteristics
329    */
330   std::unique_ptr<LogicalCharacteristics> chars_;
331 
332   uint32_t logical_camera_id_ = 0;
333 
334   static const nsecs_t kMinVerticalBlank;
335 
336   // Sensor sensitivity, approximate
337 
338   static const float kSaturationVoltage;
339   static const uint32_t kSaturationElectrons;
340   static const float kVoltsPerLuxSecond;
341   static const float kElectronsPerLuxSecond;
342 
343   static const float kReadNoiseStddevBeforeGain;  // In electrons
344   static const float kReadNoiseStddevAfterGain;   // In raw digital units
345   static const float kReadNoiseVarBeforeGain;
346   static const float kReadNoiseVarAfterGain;
347   static const camera_metadata_rational kNeutralColorPoint[3];
348   static const float kGreenSplit;
349 
350   static const uint32_t kMaxRAWStreams;
351   static const uint32_t kMaxProcessedStreams;
352   static const uint32_t kMaxStallingStreams;
353   static const uint32_t kMaxInputStreams;
354   static const uint32_t kMaxLensShadingMapSize[2];
355   static const int32_t kFixedBitPrecision;
356   static const int32_t kSaturationPoint;
357 
358   std::vector<int32_t> gamma_table_sRGB_;
359   std::vector<int32_t> gamma_table_smpte170m_;
360   std::vector<int32_t> gamma_table_hlg_;
361 
362   Mutex control_mutex_;  // Lock before accessing control parameters
363   // Start of control parameters
364   Condition vsync_;
365   bool got_vsync_;
366   std::unique_ptr<LogicalCameraSettings> current_settings_;
367   std::unique_ptr<HwlPipelineResult> current_result_;
368   std::unique_ptr<HwlPipelineResult> partial_result_;
369   std::unique_ptr<Buffers> current_output_buffers_;
370   std::unique_ptr<Buffers> current_input_buffers_;
371   std::unique_ptr<JpegCompressor> jpeg_compressor_;
372 
373   // End of control parameters
374 
375   unsigned int rand_seed_ = 1;
376 
377   /**
378    * Inherited Thread virtual overrides, and members only used by the
379    * processing thread
380    */
381   bool threadLoop() override;
382 
383   nsecs_t next_capture_time_;
384   nsecs_t next_readout_time_;
385 
386   struct SensorBinningFactorInfo {
387     bool has_raw_stream = false;
388     bool has_non_raw_stream = false;
389     bool quad_bayer_sensor = false;
390     bool max_res_request = false;
391     bool has_cropped_raw_stream = false;
392     bool raw_in_sensor_zoom_applied = false;
393   };
394 
395   std::map<uint32_t, SensorBinningFactorInfo> sensor_binning_factor_info_;
396 
397   std::unique_ptr<EmulatedScene> scene_;
398 
399   RgbRgbMatrix rgb_rgb_matrix_;
400 
401   static EmulatedScene::ColorChannels GetQuadBayerColor(uint32_t x, uint32_t y);
402 
403   static void RemosaicQuadBayerBlock(uint16_t* img_in, uint16_t* img_out,
404                                      int xstart, int ystart,
405                                      int row_stride_in_bytes);
406 
407   static status_t RemosaicRAW16Image(uint16_t* img_in, uint16_t* img_out,
408                                      size_t row_stride_in_bytes,
409                                      const SensorCharacteristics& chars);
410 
411   void CaptureRawBinned(uint8_t* img, size_t row_stride_in_bytes, uint32_t gain,
412                         const SensorCharacteristics& chars);
413 
414   void CaptureRawFullRes(uint8_t* img, size_t row_stride_in_bytes,
415                          uint32_t gain, const SensorCharacteristics& chars);
416   void CaptureRawInSensorZoom(uint8_t* img, size_t row_stride_in_bytes,
417                               uint32_t gain, const SensorCharacteristics& chars);
418   void CaptureRaw(uint8_t* img, size_t row_stride_in_bytes, uint32_t gain,
419                   const SensorCharacteristics& chars, bool in_sensor_zoom,
420                   bool binned);
421 
422   enum RGBLayout { RGB, RGBA, ARGB };
423   void CaptureRGB(uint8_t* img, uint32_t width, uint32_t height,
424                   uint32_t stride, RGBLayout layout, uint32_t gain,
425                   int32_t color_space, const SensorCharacteristics& chars);
426   void CaptureYUV420(YCbCrPlanes yuv_layout, uint32_t width, uint32_t height,
427                      uint32_t gain, float zoom_ratio, bool rotate,
428                      int32_t color_space, const SensorCharacteristics& chars);
429   void CaptureDepth(uint8_t* img, uint32_t gain, uint32_t width, uint32_t height,
430                     uint32_t stride, const SensorCharacteristics& chars);
431   void RgbToRgb(uint32_t* r_count, uint32_t* g_count, uint32_t* b_count);
432   void CalculateRgbRgbMatrix(int32_t color_space,
433                              const SensorCharacteristics& chars);
434 
435   struct YUV420Frame {
436     uint32_t width = 0;
437     uint32_t height = 0;
438     YCbCrPlanes planes;
439   };
440 
441   enum ProcessType { REPROCESS, HIGH_QUALITY, REGULAR };
442   status_t ProcessYUV420(const YUV420Frame& input, const YUV420Frame& output,
443                          uint32_t gain, ProcessType process_type,
444                          float zoom_ratio, bool rotate_and_crop,
445                          int32_t color_space,
446                          const SensorCharacteristics& chars);
447 
448   inline int32_t ApplysRGBGamma(int32_t value, int32_t saturation);
449   inline int32_t ApplySMPTE170MGamma(int32_t value, int32_t saturation);
450   inline int32_t ApplyST2084Gamma(int32_t value, int32_t saturation);
451   inline int32_t ApplyHLGGamma(int32_t value, int32_t saturation);
452   inline int32_t GammaTable(int32_t value, int32_t color_space);
453 
454   bool WaitForVSyncLocked(nsecs_t reltime);
455   void CalculateAndAppendNoiseProfile(float gain /*in ISO*/,
456                                       float base_gain_factor,
457                                       HalCameraMetadata* result /*out*/);
458 
459   void ReturnResults(HwlPipelineCallback callback,
460                      std::unique_ptr<LogicalCameraSettings> settings,
461                      std::unique_ptr<HwlPipelineResult> result,
462                      bool reprocess_request,
463                      std::unique_ptr<HwlPipelineResult> partial_result);
464 
GetBaseGainFactor(float max_raw_value)465   static float GetBaseGainFactor(float max_raw_value) {
466     return max_raw_value / EmulatedSensor::kSaturationElectrons;
467   }
468 
469   nsecs_t getSystemTimeWithSource(uint32_t timestamp_source);
470 };
471 
472 }  // namespace android
473 
474 #endif  // HW_EMULATOR_CAMERA2_SENSOR_H
475