• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2012 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 /**
18  * This class is a simple simulation of a typical CMOS cellphone imager chip,
19  * which outputs 12-bit Bayer-mosaic raw images.
20  *
21  * Unlike most real image sensors, this one's native color space is linear sRGB.
22  *
23  * The sensor is abstracted as operating as a pipeline 3 stages deep;
24  * conceptually, each frame to be captured goes through these three stages. The
25  * processing step for the sensor is marked off by vertical sync signals, which
26  * indicate the start of readout of the oldest frame. The interval between
27  * processing steps depends on the frame duration of the frame currently being
28  * captured. The stages are 1) configure, 2) capture, and 3) readout. During
29  * configuration, the sensor's registers for settings such as exposure time,
30  * frame duration, and gain are set for the next frame to be captured. In stage
31  * 2, the image data for the frame is actually captured by the sensor. Finally,
32  * in stage 3, the just-captured data is read out and sent to the rest of the
33  * system.
34  *
35  * The sensor is assumed to be rolling-shutter, so low-numbered rows of the
36  * sensor are exposed earlier in time than larger-numbered rows, with the time
37  * offset between each row being equal to the row readout time.
38  *
39  * The characteristics of this sensor don't correspond to any actual sensor,
40  * but are not far off typical sensors.
41  *
42  * Example timing diagram, with three frames:
43  *  Frame 0-1: Frame duration 50 ms, exposure time 20 ms.
44  *  Frame   2: Frame duration 75 ms, exposure time 65 ms.
45  * Legend:
46  *   C = update sensor registers for frame
47  *   v = row in reset (vertical blanking interval)
48  *   E = row capturing image data
49  *   R = row being read out
50  *   | = vertical sync signal
51  *time(ms)|   0          55        105       155            230     270
52  * Frame 0|   :configure : capture : readout :              :       :
53  *  Row # | ..|CCCC______|_________|_________|              :       :
54  *      0 |   :\          \vvvvvEEEER         \             :       :
55  *    500 |   : \          \vvvvvEEEER         \            :       :
56  *   1000 |   :  \          \vvvvvEEEER         \           :       :
57  *   1500 |   :   \          \vvvvvEEEER         \          :       :
58  *   2000 |   :    \__________\vvvvvEEEER_________\         :       :
59  * Frame 1|   :           configure  capture      readout   :       :
60  *  Row # |   :          |CCCC_____|_________|______________|       :
61  *      0 |   :          :\         \vvvvvEEEER              \      :
62  *    500 |   :          : \         \vvvvvEEEER              \     :
63  *   1000 |   :          :  \         \vvvvvEEEER              \    :
64  *   1500 |   :          :   \         \vvvvvEEEER              \   :
65  *   2000 |   :          :    \_________\vvvvvEEEER______________\  :
66  * Frame 2|   :          :          configure     capture    readout:
67  *  Row # |   :          :         |CCCC_____|______________|_______|...
68  *      0 |   :          :         :\         \vEEEEEEEEEEEEER       \
69  *    500 |   :          :         : \         \vEEEEEEEEEEEEER       \
70  *   1000 |   :          :         :  \         \vEEEEEEEEEEEEER       \
71  *   1500 |   :          :         :   \         \vEEEEEEEEEEEEER       \
72  *   2000 |   :          :         :    \_________\vEEEEEEEEEEEEER_______\
73  */
74 
75 #ifndef HW_EMULATOR_CAMERA2_SENSOR_H
76 #define HW_EMULATOR_CAMERA2_SENSOR_H
77 
78 #include "utils/Thread.h"
79 #include "utils/Mutex.h"
80 #include "utils/Timers.h"
81 
82 #include "Scene.h"
83 #include "Base.h"
84 namespace android {
85 
86 class EmulatedFakeCamera2;
87 
class Sensor: private Thread, public virtual RefBase {
  public:

    // width: Width of pixel array, in pixels
    // height: Height of pixel array, in pixels
    Sensor(uint32_t width, uint32_t height);
    ~Sensor();

    /*
     * Power control
     */

    // Start/stop the simulated sensor. Presumably these run/stop the
    // inherited Thread's loop (threadLoop below) — confirm in Sensor.cpp.
    status_t startUp();
    status_t shutDown();

    /*
     * Access to scene
     */
    // Returns a reference to the simulated scene rendered into output buffers.
    Scene &getScene();

    /*
     * Controls that can be updated every frame
     */

    // Exposure time for the next captured frame, in nanoseconds.
    void setExposureTime(uint64_t ns);
    // Total frame duration for the next captured frame, in nanoseconds.
    void setFrameDuration(uint64_t ns);
    // Analog gain for the next captured frame (see kSensitivityRange).
    void setSensitivity(uint32_t gain);
    // Buffer must be at least stride*height*2 bytes in size
    void setDestinationBuffers(Buffers *buffers);
    // To simplify tracking sensor's current frame
    void setFrameNumber(uint32_t frameNumber);

    /*
     * Controls that cause reconfiguration delay
     */

    void setBinning(int horizontalFactor, int verticalFactor);

    /*
     * Synchronizing with sensor operation (vertical sync)
     */

    // Wait until the sensor outputs its next vertical sync signal, meaning it
    // is starting readout of its latest frame of data. Returns true if vertical
    // sync is signaled, false if the wait timed out.
    bool waitForVSync(nsecs_t reltime);

    // Wait until a new frame has been read out, and then return the time
    // capture started.  May return immediately if a new frame has been pushed
    // since the last wait for a new frame. Returns true if new frame is
    // returned, false if timed out.
    bool waitForNewFrame(nsecs_t reltime,
            nsecs_t *captureTime);

    /*
     * Interrupt event servicing from the sensor. Only triggers for sensor
     * cycles that have valid buffers to write to.
     */
    struct SensorListener {
        enum Event {
            EXPOSURE_START, // Start of exposure
        };

        // Called from the sensor's processing thread when the event fires;
        // timestamp is in the same clock domain as nsecs_t elsewhere here.
        virtual void onSensorEvent(uint32_t frameNumber, Event e,
                nsecs_t timestamp) = 0;
        virtual ~SensorListener();
    };

    // Registers the (single) listener for sensor events; see mListener.
    void setSensorListener(SensorListener *listener);

    /**
     * Static sensor characteristics
     */
    // [width, height] of the pixel array, set at construction.
    const uint32_t mResolution[2];
    // Active array rectangle, presumably [xmin, ymin, width, height] —
    // confirm against the initialization in Sensor.cpp.
    const uint32_t mActiveArray[4];

    // [min, max] supported exposure time, in nanoseconds.
    static const nsecs_t kExposureTimeRange[2];
    // [min, max] supported frame duration, in nanoseconds.
    static const nsecs_t kFrameDurationRange[2];
    static const nsecs_t kMinVerticalBlank;

    static const uint8_t kColorFilterArrangement;

    // Output image data characteristics
    static const uint32_t kMaxRawValue; // Max 12-bit Bayer sample (see file comment)
    static const uint32_t kBlackLevel;
    // Sensor sensitivity, approximate

    static const float kSaturationVoltage;
    static const uint32_t kSaturationElectrons;
    static const float kVoltsPerLuxSecond;
    static const float kElectronsPerLuxSecond;

    static const float kBaseGainFactor;

    static const float kReadNoiseStddevBeforeGain; // In electrons
    static const float kReadNoiseStddevAfterGain;  // In raw digital units
    static const float kReadNoiseVarBeforeGain;
    static const float kReadNoiseVarAfterGain;

    // While each row has to read out, reset, and then expose, the (reset +
    // expose) sequence can be overlapped by other row readouts, so the final
    // minimum frame duration is purely a function of row readout time, at least
    // if there's a reasonable number of rows.
    const nsecs_t mRowReadoutTime;

    // [min, max] supported sensitivity (ISO-style gain) values.
    static const int32_t kSensitivityRange[2];
    static const uint32_t kDefaultSensitivity;

  private:
    Mutex mControlMutex; // Lock before accessing control parameters
    // Start of control parameters
    Condition mVSync;
    bool      mGotVSync;
    uint64_t  mExposureTime;   // ns, set via setExposureTime()
    uint64_t  mFrameDuration;  // ns, set via setFrameDuration()
    uint32_t  mGainFactor;     // set via setSensitivity()
    Buffers  *mNextBuffers;    // destination for the next capture; not owned
    uint32_t  mFrameNumber;

    // End of control parameters

    Mutex mReadoutMutex; // Lock before accessing readout variables
    // Start of readout variables
    Condition mReadoutAvailable; // Signaled when mCapturedBuffers is filled
    Condition mReadoutComplete;  // Signaled when a consumer takes the buffers
    Buffers  *mCapturedBuffers;  // Last fully captured frame's buffers; not owned
    nsecs_t   mCaptureTime;      // Start-of-capture time of mCapturedBuffers
    SensorListener *mListener;   // Event sink set by setSensorListener(); not owned
    // End of readout variables

    // Time of sensor startup, used for simulation zero-time point
    nsecs_t mStartupTime;

    // NOTE(review): semantics not visible in this header — presumably set when
    // the gralloc implementation is minigbm; confirm where it is assigned.
    bool mIsMinigbm;

    /**
     * Inherited Thread virtual overrides, and members only used by the
     * processing thread
     */
  private:
    virtual status_t readyToRun();

    // One iteration of the simulated sensor pipeline; runs on the inherited
    // Thread. Returning false stops the thread (libutils Thread contract).
    virtual bool threadLoop();

    nsecs_t mNextCaptureTime;        // Scheduled start time for the in-flight frame
    Buffers *mNextCapturedBuffers;   // Buffers being filled for the in-flight frame

    int mSceneWidth;
    int mSceneHeight;
    Scene mScene;

    // Per-format output generators, invoked from the processing thread to fill
    // one destination buffer each.
    void captureRaw(uint8_t *img, uint32_t gain, uint32_t stride);
    void captureRGBA(uint8_t *img, uint32_t gain, uint32_t width, uint32_t height);
    void captureRGB(uint8_t *img, uint32_t gain, uint32_t width, uint32_t height);
    void captureYU12(uint8_t *img, uint32_t gain, uint32_t width, uint32_t height);
    void captureNV12(uint8_t *img, uint32_t gain, uint32_t width, uint32_t height);
    void captureDepth(uint8_t *img, uint32_t gain, uint32_t width, uint32_t height);
    void captureDepthCloud(uint8_t *img);

};
248 
249 }
250 
251 #endif // HW_EMULATOR_CAMERA2_SENSOR_H
252