/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-JpegProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <netinet/in.h>

#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <utils/Log.h>
#include <utils/Trace.h>

#include "JpegProcessor.h"
#include <gui/SurfaceTextureClient.h>
#include "../Camera2Device.h"
#include "../Camera2Client.h"


namespace android {
namespace camera2 {
JpegProcessor::JpegProcessor(
    wp<Camera2Client> client,
    wp<CaptureSequencer> sequencer):
        Thread(false),
        mClient(client),
        mSequencer(sequencer),
        mCaptureAvailable(false),
        mCaptureStreamId(NO_STREAM) {
}

JpegProcessor::~JpegProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}
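// Called by the CpuConsumer (registered via setFrameAvailableListener) when a
// new still-capture buffer has been queued.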
void JpegProcessor::onFrameAvailable() {
    Mutex::Autolock l(mInputMutex);
    if (!mCaptureAvailable) {
        mCaptureAvailable = true;
        mCaptureAvailableSignal.signal();
    }
}

status_t JpegProcessor::updateStream(const Parameters &params) {
    ATRACE_CALL();
    ALOGV("%s", __FUNCTION__);
    status_t res;

    Mutex::Autolock l(mInputMutex);

    sp<Camera2Client> client = mClient.promote();
    if (client == 0) return OK;
    sp<Camera2Device> device = client->getCameraDevice();

    // Find out buffer size for JPEG
    camera_metadata_ro_entry_t maxJpegSize =
            params.staticInfo(ANDROID_JPEG_MAX_SIZE);
    if (maxJpegSize.count == 0) {
        ALOGE("%s: Camera %d: Can't find ANDROID_JPEG_MAX_SIZE!",
                __FUNCTION__, client->getCameraId());
        return INVALID_OPERATION;
    }

    if (mCaptureConsumer == 0) {
        // Create CPU buffer queue endpoint
        mCaptureConsumer = new CpuConsumer(1);
        mCaptureConsumer->setFrameAvailableListener(this);
        mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer"));
        mCaptureWindow = new SurfaceTextureClient(
            mCaptureConsumer->getProducerInterface());
        // Create memory for API consumption
        mCaptureHeap = new MemoryHeapBase(maxJpegSize.data.i32[0], 0,
                                       "Camera2Client::CaptureHeap");
        if (mCaptureHeap->getSize() == 0) {
            ALOGE("%s: Camera %d: Unable to allocate memory for capture",
                    __FUNCTION__, client->getCameraId());
            return NO_MEMORY;
        }
    }

    if (mCaptureStreamId != NO_STREAM) {
        // Check if stream parameters have to change
        uint32_t currentWidth, currentHeight;
        res = device->getStreamInfo(mCaptureStreamId,
                &currentWidth, &currentHeight, 0);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying capture output stream info: "
                    "%s (%d)", __FUNCTION__,
                    client->getCameraId(), strerror(-res), res);
            return res;
        }
        if (currentWidth != (uint32_t)params.pictureWidth ||
                currentHeight != (uint32_t)params.pictureHeight) {
            ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
                __FUNCTION__, client->getCameraId(), mCaptureStreamId);
            res = device->deleteStream(mCaptureStreamId);
            if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete old output stream "
                        "for capture: %s (%d)", __FUNCTION__,
                        client->getCameraId(), strerror(-res), res);
                return res;
            }
            mCaptureStreamId = NO_STREAM;
        }
    }

    if (mCaptureStreamId == NO_STREAM) {
        // Create stream for HAL production
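        // (for BLOB-format streams the size argument is a byte count, so the
        // maximum JPEG size from the static metadata sizes the buffers)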
        res = device->createStream(mCaptureWindow,
                params.pictureWidth, params.pictureHeight,
                HAL_PIXEL_FORMAT_BLOB, maxJpegSize.data.i32[0],
                &mCaptureStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create output stream for capture: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }

    }
    return OK;
}

status_t JpegProcessor::deleteStream() {
    ATRACE_CALL();
    status_t res;

    Mutex::Autolock l(mInputMutex);

    if (mCaptureStreamId != NO_STREAM) {
        sp<Camera2Client> client = mClient.promote();
        if (client == 0) return OK;
        sp<Camera2Device> device = client->getCameraDevice();

        device->deleteStream(mCaptureStreamId);

        mCaptureHeap.clear();
        mCaptureWindow.clear();
        mCaptureConsumer.clear();

        mCaptureStreamId = NO_STREAM;
    }
    return OK;
}

int JpegProcessor::getStreamId() const {
    Mutex::Autolock l(mInputMutex);
    return mCaptureStreamId;
}

void JpegProcessor::dump(int fd, const Vector<String16>& args) const {
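    // No JpegProcessor-specific state is dumped yet.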
}

bool JpegProcessor::threadLoop() {
    status_t res;

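    // Wait for onFrameAvailable() to signal a new capture; on timeout, return
    // true so the thread loop runs again and keeps waiting.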
    {
        Mutex::Autolock l(mInputMutex);
        while (!mCaptureAvailable) {
            res = mCaptureAvailableSignal.waitRelative(mInputMutex,
                    kWaitDuration);
            if (res == TIMED_OUT) return true;
        }
        mCaptureAvailable = false;
    }

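    // Drain all pending captures; processNewCapture() stops returning OK once
    // the consumer has no more buffers ready.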
    do {
        sp<Camera2Client> client = mClient.promote();
        if (client == 0) return false;
        res = processNewCapture(client);
    } while (res == OK);

    return true;
}

status_t JpegProcessor::processNewCapture(sp<Camera2Client> &client) {
    ATRACE_CALL();
    status_t res;
    sp<Camera2Heap> captureHeap;

    CpuConsumer::LockedBuffer imgBuffer;

    res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
    if (res != OK) {
        if (res != BAD_VALUE) {
            ALOGE("%s: Camera %d: Error receiving still image buffer: "
                    "%s (%d)", __FUNCTION__,
                    client->getCameraId(), strerror(-res), res);
        }
        return res;
    }

    ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
            client->getCameraId());

    if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
        ALOGE("%s: Camera %d: Unexpected format for still image: "
                "%x, expected %x", __FUNCTION__, client->getCameraId(),
                imgBuffer.format,
                HAL_PIXEL_FORMAT_BLOB);
        mCaptureConsumer->unlockBuffer(imgBuffer);
        return OK;
    }

    // Find size of JPEG image
    size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width);
    if (jpegSize == 0) { // failed to find size, default to whole buffer
        jpegSize = imgBuffer.width;
    }
    size_t heapSize = mCaptureHeap->getSize();
    if (jpegSize > heapSize) {
        ALOGW("%s: JPEG image is larger than expected, truncating "
                "(got %d, expected at most %d bytes)",
                __FUNCTION__, jpegSize, heapSize);
        jpegSize = heapSize;
    }

    // TODO: Optimize this to avoid memcopy
    sp<MemoryBase> captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize);
    void* captureMemory = mCaptureHeap->getBase();
    memcpy(captureMemory, imgBuffer.data, jpegSize);

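    // The JPEG data has been copied into mCaptureHeap, so the consumer buffer
    // can be returned to the queue.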
    mCaptureConsumer->unlockBuffer(imgBuffer);

    sp<CaptureSequencer> sequencer = mSequencer.promote();
    if (sequencer != 0) {
        sequencer->onCaptureAvailable(imgBuffer.timestamp, captureBuffer);
    }

    return OK;
}

/*
 * JPEG FILE FORMAT OVERVIEW.
 * http://www.jpeg.org/public/jfif.pdf
 * (JPEG is the image compression algorithm; the actual file format is called JFIF)
 *
 * "Markers" are 2-byte patterns used to distinguish parts of JFIF files.  The
 * first byte is always 0xFF, and the second byte is between 0x01 and 0xFE
 * (inclusive).  Because every marker begins with the same byte, they are
 * referred to by the second byte's value.
 *
 * JFIF files all begin with the Start of Image (SOI) marker, which is 0xD8.
 * Following it, "segment" sections begin with other markers, followed by a
 * 2-byte length (in network byte order), then the segment data.
 *
 * For our purposes we will ignore the data, and just use the length to skip to
 * the next segment.  This is necessary because the data inside segments is
 * allowed to contain the End of Image marker (0xFF 0xD9), preventing us from
 * naively scanning until the end.
 *
 * After all the segments are processed, the JPEG compressed image stream begins.
 * This can be considered an opaque format with one requirement: all 0xFF bytes
 * in this stream must be followed by a 0x00 byte.  This prevents any of the
 * image data from being interpreted as a segment.  The only exception is at the
 * end of the image stream, where there is an End of Image (EOI) marker: 0xFF
 * followed by a non-zero (0xD9) byte.
 */

const uint8_t MARK = 0xFF; // First byte of marker
const uint8_t SOI = 0xD8; // Start of Image
const uint8_t EOI = 0xD9; // End of Image
const size_t MARKER_LENGTH = 2; // length of a marker

#pragma pack(push)
#pragma pack(1)
typedef struct segment {
    uint8_t marker[MARKER_LENGTH];
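    // Big-endian on the wire; per JFIF the value includes these two length
    // bytes but not the preceding marker.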
    uint16_t length;
} segment_t;
#pragma pack(pop)

/* HELPER FUNCTIONS */

// check for Start of Image marker
bool checkJpegStart(uint8_t* buf) {
    return buf[0] == MARK && buf[1] == SOI;
}
// check for End of Image marker
bool checkJpegEnd(uint8_t *buf) {
    return buf[0] == MARK && buf[1] == EOI;
}
// check for arbitrary marker, returns marker type (second byte)
// returns 0 if no marker found. Note: 0x00 is not a valid marker type
uint8_t checkJpegMarker(uint8_t *buf) {
    if (buf[0] == MARK && buf[1] > 0 && buf[1] < 0xFF) {
        return buf[1];
    }
    return 0;
}

// Return the size of the JPEG, 0 indicates failure
size_t JpegProcessor::findJpegSize(uint8_t* jpegBuffer, size_t maxSize) {
    size_t size;

    // First check for JPEG transport header at the end of the buffer
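    // (the HAL writes a camera2_jpeg_blob struct at the tail of the
    // maximum-size BLOB buffer to report the actual compressed size)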
    uint8_t *header = jpegBuffer + (maxSize - sizeof(struct camera2_jpeg_blob));
    struct camera2_jpeg_blob *blob = (struct camera2_jpeg_blob*)(header);
    if (blob->jpeg_blob_id == CAMERA2_JPEG_BLOB_ID) {
        size = blob->jpeg_size;
        if (size > 0 && size <= maxSize - sizeof(struct camera2_jpeg_blob)) {
            // Verify SOI and EOI markers
            size_t offset = size - MARKER_LENGTH;
            uint8_t *end = jpegBuffer + offset;
            if (checkJpegStart(jpegBuffer) && checkJpegEnd(end)) {
                ALOGV("Found JPEG transport header, img size %d", size);
                return size;
            } else {
                ALOGW("Found JPEG transport header with bad Image Start/End");
            }
        } else {
            ALOGW("Found JPEG transport header with bad size %d", size);
        }
    }

    // Check Start of Image
    if ( !checkJpegStart(jpegBuffer) ) {
        ALOGE("Could not find start of JPEG marker");
        return 0;
    }

    // Read JFIF segment markers, skip over segment data
    size = 0;
    while (size <= maxSize - MARKER_LENGTH) {
        segment_t *segment = (segment_t*)(jpegBuffer + size);
        uint8_t type = checkJpegMarker(segment->marker);
        if (type == 0) { // invalid marker, no more segments, begin JPEG data
            ALOGV("JPEG stream found beginning at offset %d", size);
            break;
        }
        if (type == EOI || size > maxSize - sizeof(segment_t)) {
            ALOGE("Got premature End before JPEG data, offset %d", size);
            return 0;
        }
        size_t length = ntohs(segment->length);
        ALOGV("JFIF Segment, type %x length %x", type, length);
        size += length + MARKER_LENGTH;
    }

    // Scan the compressed image stream until the End of Image (EOI) marker
    bool foundEnd = false;
    for ( ; size <= maxSize - MARKER_LENGTH; size++) {
        if ( checkJpegEnd(jpegBuffer + size) ) {
            foundEnd = true;
            size += MARKER_LENGTH;
            break;
        }
    }
    if (!foundEnd) {
        ALOGE("Could not find end of JPEG marker");
        return 0;
    }

    if (size > maxSize) {
        ALOGW("JPEG size %d too large, reducing to maxSize %d", size, maxSize);
        size = maxSize;
    }
    ALOGV("Final JPEG size %d", size);
    return size;
}

}; // namespace camera2
}; // namespace android