/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-JpegProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <netinet/in.h>

#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <gui/Surface.h>

#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
#include "api1/client2/Camera2Heap.h"
#include "api1/client2/CaptureSequencer.h"
#include "api1/client2/JpegProcessor.h"

namespace android {
namespace camera2 {

JpegProcessor::JpegProcessor(
    sp<Camera2Client> client,
    wp<CaptureSequencer> sequencer):
        Thread(false),
        mDevice(client->getCameraDevice()),
        mSequencer(sequencer),
        mId(client->getCameraId()),
        mCaptureDone(false),
        mCaptureSuccess(false),
        mCaptureStreamId(NO_STREAM) {
}

JpegProcessor::~JpegProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}

void JpegProcessor::onFrameAvailable(const BufferItem& /*item*/) {
    Mutex::Autolock l(mInputMutex);
    ALOGV("%s", __FUNCTION__);
    if (!mCaptureDone) {
        mCaptureDone = true;
        mCaptureSuccess = true;
        mCaptureDoneSignal.signal();
    }
}

void JpegProcessor::onBufferAcquired(const BufferInfo& /*bufferInfo*/) {
    // Intentionally left empty
}

void JpegProcessor::onBufferReleased(const BufferInfo& bufferInfo) {
    ALOGV("%s", __FUNCTION__);
    if (bufferInfo.mError) {
        // Only take the lock in the error case: a release callback also
        // arrives for every successful onFrameAvailable, and scheduling may
        // delay it long enough to collide with later preview restart
        // operations in the non-error case.
        // b/29524651
        ALOGV("%s: JPEG buffer lost", __FUNCTION__);
        Mutex::Autolock l(mInputMutex);
        mCaptureDone = true;
        mCaptureSuccess = false;
        mCaptureDoneSignal.signal();
    }
}

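// (Re)creates the JPEG BLOB output stream, along with the CPU-side consumer
// and ashmem heap used to hand completed JPEGs back to the framework.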
status_t JpegProcessor::updateStream(const Parameters &params) {
    ATRACE_CALL();
    ALOGV("%s", __FUNCTION__);
    status_t res;

    Mutex::Autolock l(mInputMutex);

    sp<CameraDeviceBase> device = mDevice.promote();
    if (device == 0) {
        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    // Find out buffer size for JPEG
    ssize_t maxJpegSize = device->getJpegBufferSize(params.pictureWidth, params.pictureHeight);
    if (maxJpegSize <= 0) {
        ALOGE("%s: Camera %d: Jpeg buffer size (%zd) is invalid",
                __FUNCTION__, mId, maxJpegSize);
        return INVALID_OPERATION;
    }

    if (mCaptureConsumer == 0) {
        // Create CPU buffer queue endpoint
        sp<IGraphicBufferProducer> producer;
        sp<IGraphicBufferConsumer> consumer;
        BufferQueue::createBufferQueue(&producer, &consumer);
        mCaptureConsumer = new CpuConsumer(consumer, 1);
        mCaptureConsumer->setFrameAvailableListener(this);
        mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
        mCaptureWindow = new Surface(producer);
    }

    // Since ashmem heaps are rounded up to page size, the capture heap won't
    // be exactly the required JPEG buffer size; only reallocate if it is too
    // small, or more than HEAP_SLACK_FACTOR times larger than needed.
    const size_t HEAP_SLACK_FACTOR = 2;
    if (mCaptureHeap == 0 ||
            (mCaptureHeap->getSize() < static_cast<size_t>(maxJpegSize)) ||
            (mCaptureHeap->getSize() >
                    static_cast<size_t>(maxJpegSize) * HEAP_SLACK_FACTOR) ) {
        // Create memory for API consumption
        mCaptureHeap.clear();
        mCaptureHeap =
                new MemoryHeapBase(maxJpegSize, 0, "Camera2Client::CaptureHeap");
        if (mCaptureHeap->getSize() == 0) {
            ALOGE("%s: Camera %d: Unable to allocate memory for capture",
                    __FUNCTION__, mId);
            return NO_MEMORY;
        }
    }
    ALOGV("%s: Camera %d: JPEG capture heap now %zu bytes; requested %zd bytes",
            __FUNCTION__, mId, mCaptureHeap->getSize(), maxJpegSize);

    if (mCaptureStreamId != NO_STREAM) {
        // Check if stream parameters have to change
        uint32_t currentWidth, currentHeight;
        res = device->getStreamInfo(mCaptureStreamId,
                &currentWidth, &currentHeight, 0, 0);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying capture output stream info: "
                    "%s (%d)", __FUNCTION__,
                    mId, strerror(-res), res);
            return res;
        }
        if (currentWidth != (uint32_t)params.pictureWidth ||
                currentHeight != (uint32_t)params.pictureHeight) {
            ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
                __FUNCTION__, mId, mCaptureStreamId);
            res = device->deleteStream(mCaptureStreamId);
            if (res == -EBUSY) {
                ALOGV("%s: Camera %d: Device is busy, call updateStream again "
                      "after it becomes idle", __FUNCTION__, mId);
                return res;
            } else if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete old output stream "
                        "for capture: %s (%d)", __FUNCTION__,
                        mId, strerror(-res), res);
                return res;
            }
            mCaptureStreamId = NO_STREAM;
        }
    }

    if (mCaptureStreamId == NO_STREAM) {
        // Create stream for HAL production
        res = device->createStream(mCaptureWindow,
                params.pictureWidth, params.pictureHeight,
                HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_V0_JFIF,
                CAMERA3_STREAM_ROTATION_0, &mCaptureStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create output stream for capture: "
                    "%s (%d)", __FUNCTION__, mId,
                    strerror(-res), res);
            return res;
        }

        res = device->addBufferListenerForStream(mCaptureStreamId, this);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't add buffer listener: %s (%d)",
                    __FUNCTION__, mId, strerror(-res), res);
            return res;
        }
    }
    return OK;
}

status_t JpegProcessor::deleteStream() {
    ATRACE_CALL();

    Mutex::Autolock l(mInputMutex);

    if (mCaptureStreamId != NO_STREAM) {
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device == 0) {
            ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        device->deleteStream(mCaptureStreamId);

        mCaptureHeap.clear();
        mCaptureWindow.clear();
        mCaptureConsumer.clear();

        mCaptureStreamId = NO_STREAM;
    }
    return OK;
}

int JpegProcessor::getStreamId() const {
    Mutex::Autolock l(mInputMutex);
    return mCaptureStreamId;
}

void JpegProcessor::dump(int /*fd*/, const Vector<String16>& /*args*/) const {
}

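// Waits (with a timeout) for a capture-done signal from the consumer
// callbacks, then hands the completed or failed capture to processNewCapture.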
bool JpegProcessor::threadLoop() {
    status_t res;

    bool captureSuccess = false;
    {
        Mutex::Autolock l(mInputMutex);

        while (!mCaptureDone) {
            res = mCaptureDoneSignal.waitRelative(mInputMutex,
                    kWaitDuration);
            if (res == TIMED_OUT) return true;
        }

        captureSuccess = mCaptureSuccess;
        mCaptureDone = false;
    }

    res = processNewCapture(captureSuccess);

    return true;
}

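// Copies the JPEG payload out of the locked BLOB buffer into the ashmem
// capture heap and notifies the CaptureSequencer of the result.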
status_t JpegProcessor::processNewCapture(bool captureSuccess) {
    ATRACE_CALL();
    status_t res;
    sp<Camera2Heap> captureHeap;
    sp<MemoryBase> captureBuffer;

    CpuConsumer::LockedBuffer imgBuffer;

    if (captureSuccess) {
        Mutex::Autolock l(mInputMutex);
        if (mCaptureStreamId == NO_STREAM) {
            ALOGW("%s: Camera %d: No stream is available", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
        if (res != OK) {
            if (res != BAD_VALUE) {
                ALOGE("%s: Camera %d: Error receiving still image buffer: "
                        "%s (%d)", __FUNCTION__,
                        mId, strerror(-res), res);
            }
            return res;
        }

        ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
                mId);

        if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
            ALOGE("%s: Camera %d: Unexpected format for still image: "
                    "%x, expected %x", __FUNCTION__, mId,
                    imgBuffer.format,
                    HAL_PIXEL_FORMAT_BLOB);
            mCaptureConsumer->unlockBuffer(imgBuffer);
            return OK;
        }

        // Find size of JPEG image
        size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width);
        if (jpegSize == 0) { // failed to find size, default to whole buffer
            jpegSize = imgBuffer.width;
        }
        size_t heapSize = mCaptureHeap->getSize();
        if (jpegSize > heapSize) {
            ALOGW("%s: JPEG image is larger than expected, truncating "
                    "(got %zu, expected at most %zu bytes)",
                    __FUNCTION__, jpegSize, heapSize);
            jpegSize = heapSize;
        }

        // TODO: Optimize this to avoid memcopy
        captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize);
        void* captureMemory = mCaptureHeap->getBase();
        memcpy(captureMemory, imgBuffer.data, jpegSize);

        mCaptureConsumer->unlockBuffer(imgBuffer);
    }

    sp<CaptureSequencer> sequencer = mSequencer.promote();
    if (sequencer != 0) {
        sequencer->onCaptureAvailable(imgBuffer.timestamp, captureBuffer, !captureSuccess);
    }

    return OK;
}

/*
 * JPEG FILE FORMAT OVERVIEW.
 * http://www.jpeg.org/public/jfif.pdf
 * (JPEG is the image compression algorithm; the actual file format is called JFIF)
 *
 * "Markers" are 2-byte patterns used to distinguish parts of JFIF files.  The
 * first byte is always 0xFF, and the second byte is between 0x01 and 0xFE
 * (inclusive).  Because every marker begins with the same byte, they are
 * referred to by the second byte's value.
 *
 * JFIF files all begin with the Start of Image (SOI) marker, which is 0xD8.
 * Following it, "segment" sections begin with other markers, followed by a
 * 2-byte length (in network byte order), then the segment data.
 *
 * For our purposes we will ignore the data, and just use the length to skip to
 * the next segment.  This is necessary because the data inside segments is
 * allowed to contain the End of Image marker (0xFF 0xD9), preventing us from
 * naively scanning until the end.
 *
 * After all the segments are processed, the JPEG compressed image stream begins.
 * This can be considered an opaque format with one requirement: all 0xFF bytes
 * in this stream must be followed by a 0x00 byte.  This prevents any of the
 * image data from being interpreted as a segment.  The only exception is at
 * the end of the image stream, where there is an End of Image (EOI) marker:
 * 0xFF followed by a non-zero (0xD9) byte.
 */

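// Illustrative byte layout of a minimal JFIF stream (for reference only, not
// taken from any real capture):
//
//   FF D8              SOI marker
//   FF E0 00 10 ...    APP0 segment: marker, then 2-byte big-endian length
//                      (0x0010 = 16, counting the length field itself), then data
//   ...                entropy-coded image data, with 0xFF bytes stuffed as FF 00
//   FF D9              EOI marker
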
const uint8_t MARK = 0xFF; // First byte of marker
const uint8_t SOI = 0xD8; // Start of Image
const uint8_t EOI = 0xD9; // End of Image
const size_t MARKER_LENGTH = 2; // length of a marker

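// JFIF segment header: a 2-byte marker followed by a 16-bit big-endian length.
// pack(1) keeps the struct byte-compatible with the in-buffer layout so it can
// be overlaid directly onto the JPEG data.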
#pragma pack(push)
#pragma pack(1)
typedef struct segment {
    uint8_t marker[MARKER_LENGTH];
    uint16_t length;
} segment_t;
#pragma pack(pop)

/* HELPER FUNCTIONS */

// check for Start of Image marker
bool checkJpegStart(uint8_t* buf) {
    return buf[0] == MARK && buf[1] == SOI;
}
// check for End of Image marker
bool checkJpegEnd(uint8_t *buf) {
    return buf[0] == MARK && buf[1] == EOI;
}
// check for arbitrary marker, returns marker type (second byte)
// returns 0 if no marker found. Note: 0x00 is not a valid marker type
uint8_t checkJpegMarker(uint8_t *buf) {
    if (buf[0] == MARK && buf[1] > 0 && buf[1] < 0xFF) {
        return buf[1];
    }
    return 0;
}

// Return the size of the JPEG, 0 indicates failure
size_t JpegProcessor::findJpegSize(uint8_t* jpegBuffer, size_t maxSize) {
    size_t size;

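    // The HAL writes a camera2_jpeg_blob transport header at the very end of
    // the BLOB buffer, recording the actual compressed size; prefer that,
    // falling back to parsing the JFIF structure if the header is missing.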
    // First check for JPEG transport header at the end of the buffer
    uint8_t *header = jpegBuffer + (maxSize - sizeof(struct camera2_jpeg_blob));
    struct camera2_jpeg_blob *blob = (struct camera2_jpeg_blob*)(header);
    if (blob->jpeg_blob_id == CAMERA2_JPEG_BLOB_ID) {
        size = blob->jpeg_size;
        if (size > 0 && size <= maxSize - sizeof(struct camera2_jpeg_blob)) {
            // Verify SOI and EOI markers
            size_t offset = size - MARKER_LENGTH;
            uint8_t *end = jpegBuffer + offset;
            if (checkJpegStart(jpegBuffer) && checkJpegEnd(end)) {
                ALOGV("Found JPEG transport header, img size %zu", size);
                return size;
            } else {
                ALOGW("Found JPEG transport header with bad Image Start/End");
            }
        } else {
            ALOGW("Found JPEG transport header with bad size %zu", size);
        }
    }

    // Check Start of Image
    if ( !checkJpegStart(jpegBuffer) ) {
        ALOGE("Could not find start of JPEG marker");
        return 0;
    }

    // Read JFIF segment markers, skip over segment data
    size = 0;
    while (size <= maxSize - MARKER_LENGTH) {
        segment_t *segment = (segment_t*)(jpegBuffer + size);
        uint8_t type = checkJpegMarker(segment->marker);
        if (type == 0) { // invalid marker, no more segments, begin JPEG data
            ALOGV("JPEG stream found beginning at offset %zu", size);
            break;
        }
        if (type == EOI || size > maxSize - sizeof(segment_t)) {
            ALOGE("Got premature End before JPEG data, offset %zu", size);
            return 0;
        }
        size_t length = ntohs(segment->length);
        ALOGV("JFIF Segment, type %x length %zx", type, length);
        size += length + MARKER_LENGTH;
    }

    // Find End of Image
    // Scan JPEG buffer until End of Image (EOI)
    bool foundEnd = false;
    for ( ; size <= maxSize - MARKER_LENGTH; size++) {
        if ( checkJpegEnd(jpegBuffer + size) ) {
            foundEnd = true;
            size += MARKER_LENGTH;
            break;
        }
    }
    if (!foundEnd) {
        ALOGE("Could not find end of JPEG marker");
        return 0;
    }

    if (size > maxSize) {
        ALOGW("JPEG size %zu too large, reducing to maxSize %zu", size, maxSize);
        size = maxSize;
    }
    ALOGV("Final JPEG size %zu", size);
    return size;
}

}; // namespace camera2
}; // namespace android