/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * This class is a simple simulation of a typical CMOS cellphone imager chip,
 * which outputs 12-bit Bayer-mosaic raw images.
 *
 * Unlike most real image sensors, this one's native color space is linear sRGB.
 *
 * The sensor is abstracted as a pipeline three stages deep; conceptually, each
 * frame to be captured goes through these three stages. The processing step
 * for the sensor is marked off by vertical sync signals, which indicate the
 * start of readout of the oldest frame. The interval between processing steps
 * depends on the frame duration of the frame currently being captured. The
 * stages are 1) configure, 2) capture, and 3) readout. During configuration,
 * the sensor's registers for settings such as exposure time, frame duration,
 * and gain are set for the next frame to be captured. In stage 2, the image
 * data for the frame is actually captured by the sensor. Finally, in stage 3,
 * the just-captured data is read out and sent to the rest of the system.
 *
 * The sensor is assumed to be rolling-shutter, so low-numbered rows of the
 * sensor are exposed earlier in time than higher-numbered rows, with the time
 * offset between each row being equal to the row readout time.
 *
 * The characteristics of this sensor don't correspond to any actual sensor,
 * but they are not far off those of typical sensors.
 *
 * Example timing diagram, with three frames:
 *   Frame 0-1: Frame duration 50 ms, exposure time 20 ms.
 *   Frame   2: Frame duration 75 ms, exposure time 65 ms.
 * Legend:
 *   C = update sensor registers for frame
 *   v = row in reset (vertical blanking interval)
 *   E = row capturing image data
 *   R = row being read out
 *   | = vertical sync signal
 *time(ms)|   0          55        105       155            230     270
 * Frame 0|  :configure : capture : readout :              :       :
 *  Row # | ..|CCCC______|_________|_________|              :       :
 *      0 |  :\          \vvvvvEEEER         \              :       :
 *    500 |  :  \          \vvvvvEEEER         \            :       :
 *   1000 |  :    \          \vvvvvEEEER         \          :       :
 *   1500 |  :      \          \vvvvvEEEER         \        :       :
 *   2000 |  :        \__________\vvvvvEEEER_________\      :       :
 * Frame 1|  :          configure  capture    readout       :       :
 *  Row # |  :          |CCCC_____|_________|______________|        :
 *      0 |  :          :\         \vvvvvEEEER              \       :
 *    500 |  :          :  \         \vvvvvEEEER              \     :
 *   1000 |  :          :    \         \vvvvvEEEER              \   :
 *   1500 |  :          :      \         \vvvvvEEEER              \ :
 *   2000 |  :          :        \_________\vvvvvEEEER______________\
 * Frame 2|  :          :          configure     capture     readout:
 *  Row # |  :          :         |CCCC_____|______________|_______|...
 *      0 |  :          :         :\         \vEEEEEEEEEEEEER       \
 *    500 |  :          :         :  \         \vEEEEEEEEEEEEER       \
 *   1000 |  :          :         :    \         \vEEEEEEEEEEEEER       \
 *   1500 |  :          :         :      \         \vEEEEEEEEEEEEER       \
 *   2000 |  :          :         :        \_________\vEEEEEEEEEEEEER_______\
 */
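
/*
 * The rolling-shutter model above implies that each row's exposure window is
 * offset from the previous row's by one row readout time. A hypothetical
 * sketch of the resulting timing relationships; these helpers are
 * illustrative only and do not exist in the emulator sources:
 *
 *   // Exposure of row 'row' starts one row-readout-time after the row above.
 *   nsecs_t rowExposureStart(nsecs_t frameExposureStart, uint32_t row,
 *                            nsecs_t rowReadoutTime) {
 *       return frameExposureStart + (nsecs_t)row * rowReadoutTime;
 *   }
 *
 *   // Because each row's (reset + expose) overlaps other rows' readout, the
 *   // minimum frame duration is roughly the time to read out every row once.
 *   nsecs_t approxMinFrameDuration(uint32_t numRows, nsecs_t rowReadoutTime) {
 *       return (nsecs_t)numRows * rowReadoutTime;
 *   }
 */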

#ifndef HW_EMULATOR_CAMERA2_SENSOR_H
#define HW_EMULATOR_CAMERA2_SENSOR_H

#include "utils/Thread.h"
#include "utils/Mutex.h"
#include "utils/Timers.h"

#include "Scene.h"
#include "Base.h"

namespace android {

class EmulatedFakeCamera2;

class Sensor: private Thread, public virtual RefBase {
  public:

    // width: Width of pixel array
    // height: Height of pixel array
    Sensor(uint32_t width, uint32_t height);
    ~Sensor();

    /*
     * Power control
     */

    status_t startUp();
    status_t shutDown();

    /*
     * Access to scene
     */
    Scene &getScene();

    /*
     * Controls that can be updated every frame
     */

    void setExposureTime(uint64_t ns);
    void setFrameDuration(uint64_t ns);
    void setSensitivity(uint32_t gain);
    // Buffer must be at least stride*height*2 bytes in size
    void setDestinationBuffers(Buffers *buffers);
    // To simplify tracking the sensor's current frame
    void setFrameNumber(uint32_t frameNumber);

    /*
     * Controls that cause reconfiguration delay
     */

    void setBinning(int horizontalFactor, int verticalFactor);

    /*
     * Synchronizing with sensor operation (vertical sync)
     */

    // Wait until the sensor outputs its next vertical sync signal, meaning it
    // is starting readout of its latest frame of data. Returns true if
    // vertical sync is signaled, false if the wait timed out.
    bool waitForVSync(nsecs_t reltime);

    // Wait until a new frame has been read out, and then return the time
    // capture started. May return immediately if a new frame has been pushed
    // since the last wait for a new frame. Returns true if a new frame is
    // returned, false if the wait timed out.
    bool waitForNewFrame(nsecs_t reltime,
            nsecs_t *captureTime);
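
    /*
     * Illustrative usage sketch: one plausible way for a client to drive the
     * per-frame controls and the synchronization calls declared above. The
     * loop structure, timeout, and local names are hypothetical, not the
     * emulated camera's actual control flow.
     *
     *   sensor->startUp();
     *   while (framesRemaining) {
     *       // Program settings for an upcoming frame (configure stage)
     *       sensor->setExposureTime(exposureNs);
     *       sensor->setFrameDuration(frameDurationNs);
     *       sensor->setSensitivity(gain);
     *       sensor->setDestinationBuffers(&nextBuffers);
     *       sensor->setFrameNumber(frameNumber++);
     *
     *       // Wait for the next sensor cycle, then for the readout to finish
     *       sensor->waitForVSync(kWaitTimeoutNs);
     *       nsecs_t captureTime;
     *       if (sensor->waitForNewFrame(kWaitTimeoutNs, &captureTime)) {
     *           // A previously queued set of destination buffers now holds a
     *           // frame whose capture started at 'captureTime'.
     *       }
     *   }
     *   sensor->shutDown();
     */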

    /*
     * Interrupt event servicing from the sensor. Only triggers for sensor
     * cycles that have valid buffers to write to.
     */
    struct SensorListener {
        enum Event {
            EXPOSURE_START, // Start of exposure
        };

        virtual void onSensorEvent(uint32_t frameNumber, Event e,
                nsecs_t timestamp) = 0;
        virtual ~SensorListener();
    };

    void setSensorListener(SensorListener *listener);

    /**
     * Static sensor characteristics
     */
    const uint32_t mResolution[2];
    const uint32_t mActiveArray[4];

    static const nsecs_t kExposureTimeRange[2];
    static const nsecs_t kFrameDurationRange[2];
    static const nsecs_t kMinVerticalBlank;

    static const uint8_t kColorFilterArrangement;

    // Output image data characteristics
    static const uint32_t kMaxRawValue;
    static const uint32_t kBlackLevel;

    // Sensor sensitivity, approximate
    static const float kSaturationVoltage;
    static const uint32_t kSaturationElectrons;
    static const float kVoltsPerLuxSecond;
    static const float kElectronsPerLuxSecond;

    static const float kBaseGainFactor;

    static const float kReadNoiseStddevBeforeGain; // In electrons
    static const float kReadNoiseStddevAfterGain;  // In raw digital units
    static const float kReadNoiseVarBeforeGain;
    static const float kReadNoiseVarAfterGain;

    // While each row has to read out, reset, and then expose, the (reset +
    // expose) sequence can be overlapped by other row readouts, so the final
    // minimum frame duration is purely a function of row readout time, at
    // least if there's a reasonable number of rows.
    const nsecs_t mRowReadoutTime;

    static const int32_t kSensitivityRange[2];
    static const uint32_t kDefaultSensitivity;

  private:
    Mutex mControlMutex; // Lock before accessing control parameters
    // Start of control parameters
    Condition mVSync;
    bool mGotVSync;
    uint64_t mExposureTime;
    uint64_t mFrameDuration;
    uint32_t mGainFactor;
    Buffers *mNextBuffers;
    uint32_t mFrameNumber;
    // End of control parameters

    Mutex mReadoutMutex; // Lock before accessing readout variables
    // Start of readout variables
    Condition mReadoutAvailable;
    Condition mReadoutComplete;
    Buffers *mCapturedBuffers;
    nsecs_t mCaptureTime;
    SensorListener *mListener;
    // End of readout variables

    // Time of sensor startup, used for simulation zero-time point
    nsecs_t mStartupTime;

    /**
     * Inherited Thread virtual overrides, and members only used by the
     * processing thread
     */
  private:
    virtual status_t readyToRun();

    virtual bool threadLoop();

    nsecs_t mNextCaptureTime;
    Buffers *mNextCapturedBuffers;

    int mSceneWidth;
    int mSceneHeight;
    Scene mScene;

    void captureRaw(uint8_t *img, uint32_t gain, uint32_t stride);
    void captureRGBA(uint8_t *img, uint32_t gain, uint32_t width, uint32_t height);
    void captureRGB(uint8_t *img, uint32_t gain, uint32_t width, uint32_t height);
    void captureYU12(uint8_t *img, uint32_t gain, uint32_t width, uint32_t height);
    void captureDepth(uint8_t *img, uint32_t gain, uint32_t width, uint32_t height);
    void captureDepthCloud(uint8_t *img);

};

} // namespace android

#endif // HW_EMULATOR_CAMERA2_SENSOR_H
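
/*
 * Illustrative sketch: a minimal SensorListener implementation that reacts to
 * exposure-start events. The class name and body are hypothetical; only the
 * overridden signature comes from the interface above.
 *
 *   struct ExposureStartLogger : public Sensor::SensorListener {
 *       virtual void onSensorEvent(uint32_t frameNumber, Event e,
 *               nsecs_t timestamp) {
 *           if (e == EXPOSURE_START) {
 *               // Record that exposure of frame 'frameNumber' began at
 *               // 'timestamp', e.g. to tag the matching capture result.
 *           }
 *       }
 *   };
 *
 *   // Registered with the sensor via:
 *   //   sensor->setSensorListener(&logger);
 */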