• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2012 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 /**
18  * This class is a simple simulation of a typical CMOS cellphone imager chip,
19  * which outputs 12-bit Bayer-mosaic raw images.
20  *
21  * Unlike most real image sensors, this one's native color space is linear sRGB.
22  *
23  * The sensor is abstracted as operating as a pipeline 3 stages deep;
24  * conceptually, each frame to be captured goes through these three stages. The
25  * processing step for the sensor is marked off by vertical sync signals, which
26  * indicate the start of readout of the oldest frame. The interval between
27  * processing steps depends on the frame duration of the frame currently being
28  * captured. The stages are 1) configure, 2) capture, and 3) readout. During
29  * configuration, the sensor's registers for settings such as exposure time,
30  * frame duration, and gain are set for the next frame to be captured. In stage
31  * 2, the image data for the frame is actually captured by the sensor. Finally,
32  * in stage 3, the just-captured data is read out and sent to the rest of the
33  * system.
34  *
35  * The sensor is assumed to be rolling-shutter, so low-numbered rows of the
36  * sensor are exposed earlier in time than larger-numbered rows, with the time
37  * offset between each row being equal to the row readout time.
38  *
39  * The characteristics of this sensor don't correspond to any actual sensor,
40  * but are not far off typical sensors.
41  *
42  * Example timing diagram, with three frames:
43  *  Frame 0-1: Frame duration 50 ms, exposure time 20 ms.
44  *  Frame   2: Frame duration 75 ms, exposure time 65 ms.
45  * Legend:
46  *   C = update sensor registers for frame
47  *   v = row in reset (vertical blanking interval)
48  *   E = row capturing image data
49  *   R = row being read out
50  *   | = vertical sync signal
51  *time(ms)|   0          55        105       155            230     270
52  * Frame 0|   :configure : capture : readout :              :       :
53  *  Row # | ..|CCCC______|_________|_________|              :       :
54  *      0 |   :\          \vvvvvEEEER         \             :       :
55  *    500 |   : \          \vvvvvEEEER         \            :       :
56  *   1000 |   :  \          \vvvvvEEEER         \           :       :
57  *   1500 |   :   \          \vvvvvEEEER         \          :       :
58  *   2000 |   :    \__________\vvvvvEEEER_________\         :       :
59  * Frame 1|   :           configure  capture      readout   :       :
60  *  Row # |   :          |CCCC_____|_________|______________|       :
61  *      0 |   :          :\         \vvvvvEEEER              \      :
62  *    500 |   :          : \         \vvvvvEEEER              \     :
63  *   1000 |   :          :  \         \vvvvvEEEER              \    :
64  *   1500 |   :          :   \         \vvvvvEEEER              \   :
65  *   2000 |   :          :    \_________\vvvvvEEEER______________\  :
66  * Frame 2|   :          :          configure     capture    readout:
67  *  Row # |   :          :         |CCCC_____|______________|_______|...
68  *      0 |   :          :         :\         \vEEEEEEEEEEEEER       \
69  *    500 |   :          :         : \         \vEEEEEEEEEEEEER       \
70  *   1000 |   :          :         :  \         \vEEEEEEEEEEEEER       \
71  *   1500 |   :          :         :   \         \vEEEEEEEEEEEEER       \
72  *   2000 |   :          :         :    \_________\vEEEEEEEEEEEEER_______\
73  */
74 
75 #ifndef HW_EMULATOR_CAMERA2_SENSOR_H
76 #define HW_EMULATOR_CAMERA2_SENSOR_H
77 
#include <hwl_types.h>

#include <algorithm>
#include <cstdint>
#include <functional>
#include <map>
#include <memory>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include "Base.h"
#include "EmulatedScene.h"
#include "JpegCompressor.h"
#include "utils/Mutex.h"
#include "utils/StreamConfigurationMap.h"
#include "utils/Thread.h"
#include "utils/Timers.h"
90 
91 namespace android {
92 
93 using google_camera_hal::HwlPipelineCallback;
94 using google_camera_hal::HwlPipelineResult;
95 using google_camera_hal::StreamConfiguration;
96 
97 /*
98  * Default to sRGB with D65 white point
99  */
// Color filter transform: coefficients converting CIE XYZ tristimulus values
// to the sensor's native color channels. The defaults are the rows of the
// standard XYZ -> linear sRGB (D65 white point) conversion matrix, with the
// green row duplicated for the two green sites (Gr/Gb) of the Bayer mosaic.
struct ColorFilterXYZ {
  // Red channel row: R = rX*X + rY*Y + rZ*Z
  float rX = 3.2406f;
  float rY = -1.5372f;
  float rZ = -0.4986f;
  // Green (red-row) channel row.
  float grX = -0.9689f;
  float grY = 1.8758f;
  float grZ = 0.0415f;
  // Green (blue-row) channel row; identical to Gr by default.
  float gbX = -0.9689f;
  float gbY = 1.8758f;
  float gbZ = 0.0415f;
  // Blue channel row: B = bX*X + bY*Y + bZ*Z
  float bX = 0.0557f;
  float bY = -0.2040f;
  float bZ = 1.0570f;
};
114 
// Maps a dynamic range profile to a set of other dynamic range profiles.
// NOTE(review): presumably the mapped set lists the profiles that may be
// captured concurrently with the key profile, mirroring the constraint
// semantics of ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP —
// verify against the code that populates and queries this map.
typedef std::unordered_map<
    camera_metadata_enum_android_request_available_dynamic_range_profiles_map,
    std::unordered_set<
        camera_metadata_enum_android_request_available_dynamic_range_profiles_map>>
    ProfileMap;
120 
// Static, per-device description of a simulated sensor: geometry, exposure
// and sensitivity limits, color filter layout, stream limits, and optional
// capabilities. Filled in once at startup and validated by
// EmulatedSensor::AreCharacteristicsSupported().
struct SensorCharacteristics {
  // Default-mode pixel array dimensions.
  size_t width = 0;
  size_t height = 0;
  // Full-resolution (unbinned) dimensions; relevant for quad-Bayer sensors.
  size_t full_res_width = 0;
  size_t full_res_height = 0;
  // [min, max] supported exposure time, in nanoseconds.
  nsecs_t exposure_time_range[2] = {0};
  // [min, max] supported frame duration, in nanoseconds.
  nsecs_t frame_duration_range[2] = {0};
  // [min, max] supported sensitivity (ISO).
  int32_t sensitivity_range[2] = {0};
  // Bayer CFA layout.
  // NOTE(review): "arangement" is misspelled (should be "arrangement"), but
  // renaming the field would break every existing user of this struct.
  camera_metadata_enum_android_sensor_info_color_filter_arrangement
      color_arangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
  // XYZ -> native color space transform (defaults to linear sRGB / D65).
  ColorFilterXYZ color_filter;
  // Maximum raw digital value the ADC can produce (e.g. 4095 for 12-bit).
  uint32_t max_raw_value = 0;
  // Per-channel black level offsets, in CFA order.
  uint32_t black_level_pattern[4] = {0};
  // Maximum simultaneous stream counts by category.
  uint32_t max_raw_streams = 0;
  uint32_t max_processed_streams = 0;
  uint32_t max_stalling_streams = 0;
  uint32_t max_input_streams = 0;
  // Physical sensor dimensions — units not established here; presumably
  // millimeters as in ANDROID_SENSOR_INFO_PHYSICAL_SIZE. TODO: confirm.
  uint32_t physical_size[2] = {0};
  bool is_flash_supported = false;
  // Lens shading map dimensions {width, height}.
  uint32_t lens_shading_map_size[2] = {0};
  // Maximum number of in-flight requests.
  uint32_t max_pipeline_depth = 0;
  // Sensor mounting orientation, in degrees.
  uint32_t orientation = 0;
  bool is_front_facing = false;
  // True if the sensor is quad-Bayer (binned by default, full-res on demand).
  bool quad_bayer_sensor = false;
  bool is_10bit_dynamic_range_capable = false;
  // Supported dynamic range profile combinations; see ProfileMap.
  ProfileMap dynamic_range_profiles;
  bool support_stream_use_case = false;
};

// Maps logical/physical camera ids to sensor characteristics
typedef std::unordered_map<uint32_t, SensorCharacteristics> LogicalCharacteristics;
152 
153 class EmulatedSensor : private Thread, public virtual RefBase {
154  public:
155   EmulatedSensor();
156   ~EmulatedSensor();
157 
OverrideFormat(android_pixel_format_t format,camera_metadata_enum_android_request_available_dynamic_range_profiles_map profile)158   static android_pixel_format_t OverrideFormat(
159       android_pixel_format_t format,
160       camera_metadata_enum_android_request_available_dynamic_range_profiles_map
161           profile) {
162     switch (profile) {
163       case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD:
164         if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
165           return HAL_PIXEL_FORMAT_YCBCR_420_888;
166         }
167         break;
168       case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
169         if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
170           return static_cast<android_pixel_format_t>(
171               HAL_PIXEL_FORMAT_YCBCR_P010);
172         }
173         break;
174       default:
175         ALOGE("%s: Unsupported dynamic range profile 0x%x", __FUNCTION__,
176               profile);
177     }
178 
179     return format;
180   }
181 
IsReprocessPathSupported(android_pixel_format_t input_format,android_pixel_format_t output_format)182   static bool IsReprocessPathSupported(android_pixel_format_t input_format,
183                                        android_pixel_format_t output_format) {
184     if ((HAL_PIXEL_FORMAT_YCBCR_420_888 == input_format) &&
185         ((HAL_PIXEL_FORMAT_YCBCR_420_888 == output_format) ||
186          (HAL_PIXEL_FORMAT_BLOB == output_format))) {
187       return true;
188     }
189 
190     if (HAL_PIXEL_FORMAT_RAW16 == input_format &&
191         HAL_PIXEL_FORMAT_RAW16 == output_format) {
192       return true;
193     }
194 
195     return false;
196   }
197 
198   static bool AreCharacteristicsSupported(
199       const SensorCharacteristics& characteristics);
200 
201   static bool IsStreamCombinationSupported(
202       uint32_t logical_id, const StreamConfiguration& config,
203       StreamConfigurationMap& map, StreamConfigurationMap& max_resolution_map,
204       const PhysicalStreamConfigurationMap& physical_map,
205       const PhysicalStreamConfigurationMap& physical_map_max_resolution,
206       const LogicalCharacteristics& sensor_chars);
207 
208   static bool IsStreamCombinationSupported(
209       uint32_t logical_id, const StreamConfiguration& config,
210       StreamConfigurationMap& map,
211       const PhysicalStreamConfigurationMap& physical_map,
212       const LogicalCharacteristics& sensor_chars, bool is_max_res = false);
213 
214   /*
215    * Power control
216    */
217 
218   status_t StartUp(uint32_t logical_camera_id,
219                    std::unique_ptr<LogicalCharacteristics> logical_chars);
220   status_t ShutDown();
221 
222   /*
223    * Physical camera settings control
224    */
225   struct SensorSettings {
226     nsecs_t exposure_time = 0;
227     nsecs_t frame_duration = 0;
228     uint32_t gain = 0;  // ISO
229     uint32_t lens_shading_map_mode;
230     bool report_neutral_color_point = false;
231     bool report_green_split = false;
232     bool report_noise_profile = false;
233     float zoom_ratio = 1.0f;
234     bool report_rotate_and_crop = false;
235     uint8_t rotate_and_crop = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
236     bool report_video_stab = false;
237     uint8_t video_stab = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
238     bool report_edge_mode = false;
239     uint8_t edge_mode = ANDROID_EDGE_MODE_OFF;
240     uint8_t sensor_pixel_mode = ANDROID_SENSOR_PIXEL_MODE_DEFAULT;
241     uint8_t test_pattern_mode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
242     uint32_t test_pattern_data[4] = {0, 0, 0, 0};
243     uint32_t screen_rotation = 0;
244   };
245 
246   // Maps physical and logical camera ids to individual device settings
247   typedef std::unordered_map<uint32_t, SensorSettings> LogicalCameraSettings;
248 
249   void SetCurrentRequest(std::unique_ptr<LogicalCameraSettings> logical_settings,
250                          std::unique_ptr<HwlPipelineResult> result,
251                          std::unique_ptr<Buffers> input_buffers,
252                          std::unique_ptr<Buffers> output_buffers);
253 
254   status_t Flush();
255 
256   /*
257    * Synchronizing with sensor operation (vertical sync)
258    */
259 
260   // Wait until the sensor outputs its next vertical sync signal, meaning it
261   // is starting readout of its latest frame of data. Returns true if vertical
262   // sync is signaled, false if the wait timed out.
263   bool WaitForVSync(nsecs_t rel_time);
264 
265   static const nsecs_t kSupportedExposureTimeRange[2];
266   static const nsecs_t kSupportedFrameDurationRange[2];
267   static const int32_t kSupportedSensitivityRange[2];
268   static const uint8_t kSupportedColorFilterArrangement;
269   static const uint32_t kDefaultMaxRawValue;
270   static const nsecs_t kDefaultExposureTime;
271   static const int32_t kDefaultSensitivity;
272   static const nsecs_t kDefaultFrameDuration;
273   static const nsecs_t kReturnResultThreshod;
274   static const uint32_t kDefaultBlackLevelPattern[4];
275   static const camera_metadata_rational kDefaultColorTransform[9];
276   static const float kDefaultColorCorrectionGains[4];
277   static const float kDefaultToneMapCurveRed[4];
278   static const float kDefaultToneMapCurveGreen[4];
279   static const float kDefaultToneMapCurveBlue[4];
280   static const uint8_t kPipelineDepth;
281 
282  private:
283   // Scene stabilization
284   static const uint32_t kRegularSceneHandshake;
285   static const uint32_t kReducedSceneHandshake;
286 
287   /**
288    * Logical characteristics
289    */
290   std::unique_ptr<LogicalCharacteristics> chars_;
291 
292   uint32_t logical_camera_id_ = 0;
293 
294   static const nsecs_t kMinVerticalBlank;
295 
296   // Sensor sensitivity, approximate
297 
298   static const float kSaturationVoltage;
299   static const uint32_t kSaturationElectrons;
300   static const float kVoltsPerLuxSecond;
301   static const float kElectronsPerLuxSecond;
302 
303   static const float kReadNoiseStddevBeforeGain;  // In electrons
304   static const float kReadNoiseStddevAfterGain;   // In raw digital units
305   static const float kReadNoiseVarBeforeGain;
306   static const float kReadNoiseVarAfterGain;
307   static const camera_metadata_rational kNeutralColorPoint[3];
308   static const float kGreenSplit;
309 
310   static const uint32_t kMaxRAWStreams;
311   static const uint32_t kMaxProcessedStreams;
312   static const uint32_t kMaxStallingStreams;
313   static const uint32_t kMaxInputStreams;
314   static const uint32_t kMaxLensShadingMapSize[2];
315   static const int32_t kFixedBitPrecision;
316   static const int32_t kSaturationPoint;
317 
318   std::vector<int32_t> gamma_table_;
319 
320   Mutex control_mutex_;  // Lock before accessing control parameters
321   // Start of control parameters
322   Condition vsync_;
323   bool got_vsync_;
324   std::unique_ptr<LogicalCameraSettings> current_settings_;
325   std::unique_ptr<HwlPipelineResult> current_result_;
326   std::unique_ptr<Buffers> current_output_buffers_;
327   std::unique_ptr<Buffers> current_input_buffers_;
328   std::unique_ptr<JpegCompressor> jpeg_compressor_;
329 
330   // End of control parameters
331 
332   unsigned int rand_seed_ = 1;
333 
334   /**
335    * Inherited Thread virtual overrides, and members only used by the
336    * processing thread
337    */
338   bool threadLoop() override;
339 
340   nsecs_t next_capture_time_;
341   nsecs_t next_readout_time_;
342 
343   struct SensorBinningFactorInfo {
344     bool has_raw_stream = false;
345     bool has_non_raw_stream = false;
346     bool quad_bayer_sensor = false;
347     bool max_res_request = false;
348   };
349 
350   std::map<uint32_t, SensorBinningFactorInfo> sensor_binning_factor_info_;
351 
352   std::unique_ptr<EmulatedScene> scene_;
353 
354   static EmulatedScene::ColorChannels GetQuadBayerColor(uint32_t x, uint32_t y);
355 
356   static void RemosaicQuadBayerBlock(uint16_t* img_in, uint16_t* img_out,
357                                      int xstart, int ystart,
358                                      int row_stride_in_bytes);
359 
360   static status_t RemosaicRAW16Image(uint16_t* img_in, uint16_t* img_out,
361                                      size_t row_stride_in_bytes,
362                                      const SensorCharacteristics& chars);
363 
364   void CaptureRawBinned(uint8_t* img, size_t row_stride_in_bytes, uint32_t gain,
365                         const SensorCharacteristics& chars);
366 
367   void CaptureRawFullRes(uint8_t* img, size_t row_stride_in_bytes,
368                          uint32_t gain, const SensorCharacteristics& chars);
369 
370   enum RGBLayout { RGB, RGBA, ARGB };
371   void CaptureRGB(uint8_t* img, uint32_t width, uint32_t height,
372                   uint32_t stride, RGBLayout layout, uint32_t gain,
373                   const SensorCharacteristics& chars);
374   void CaptureYUV420(YCbCrPlanes yuv_layout, uint32_t width, uint32_t height,
375                      uint32_t gain, float zoom_ratio, bool rotate,
376                      const SensorCharacteristics& chars);
377   void CaptureDepth(uint8_t* img, uint32_t gain, uint32_t width, uint32_t height,
378                     uint32_t stride, const SensorCharacteristics& chars);
379 
380   struct YUV420Frame {
381     uint32_t width = 0;
382     uint32_t height = 0;
383     YCbCrPlanes planes;
384   };
385 
386   enum ProcessType { REPROCESS, HIGH_QUALITY, REGULAR };
387   status_t ProcessYUV420(const YUV420Frame& input, const YUV420Frame& output,
388                          uint32_t gain, ProcessType process_type,
389                          float zoom_ratio, bool rotate_and_crop,
390                          const SensorCharacteristics& chars);
391 
392   inline int32_t ApplysRGBGamma(int32_t value, int32_t saturation);
393 
394   bool WaitForVSyncLocked(nsecs_t reltime);
395   void CalculateAndAppendNoiseProfile(float gain /*in ISO*/,
396                                       float base_gain_factor,
397                                       HalCameraMetadata* result /*out*/);
398 
399   void ReturnResults(HwlPipelineCallback callback,
400                      std::unique_ptr<LogicalCameraSettings> settings,
401                      std::unique_ptr<HwlPipelineResult> result,
402                      bool reprocess_request);
403 
GetBaseGainFactor(float max_raw_value)404   static float GetBaseGainFactor(float max_raw_value) {
405     return max_raw_value / EmulatedSensor::kSaturationElectrons;
406   }
407 };
408 
409 }  // namespace android
410 
411 #endif  // HW_EMULATOR_CAMERA2_SENSOR_H
412