/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-JpegProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <netinet/in.h>

#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <gui/Surface.h>

#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
#include "api1/client2/Camera2Heap.h"
#include "api1/client2/CaptureSequencer.h"
#include "api1/client2/JpegProcessor.h"

namespace android {
namespace camera2 {

JpegProcessor::JpegProcessor(
    sp<Camera2Client> client,
    wp<CaptureSequencer> sequencer):
        Thread(false),
        mDevice(client->getCameraDevice()),
        mSequencer(sequencer),
        mId(client->getCameraId()),
        mCaptureAvailable(false),
        mCaptureStreamId(NO_STREAM) {
}

JpegProcessor::~JpegProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}

void JpegProcessor::onFrameAvailable() {
    Mutex::Autolock l(mInputMutex);
    if (!mCaptureAvailable) {
        mCaptureAvailable = true;
        mCaptureAvailableSignal.signal();
    }
}

status_t JpegProcessor::updateStream(const Parameters &params) {
    ATRACE_CALL();
    ALOGV("%s", __FUNCTION__);
    status_t res;

    Mutex::Autolock l(mInputMutex);

    sp<CameraDeviceBase> device = mDevice.promote();
    if (device == 0) {
        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    // Find out buffer size for JPEG
    ssize_t maxJpegSize = device->getJpegBufferSize(params.pictureWidth, params.pictureHeight);
    if (maxJpegSize <= 0) {
        ALOGE("%s: Camera %d: Jpeg buffer size (%zd) is invalid",
                __FUNCTION__, mId, maxJpegSize);
        return INVALID_OPERATION;
    }

    if (mCaptureConsumer == 0) {
        // Create CPU buffer queue endpoint
        sp<IGraphicBufferProducer> producer;
        sp<IGraphicBufferConsumer> consumer;
        BufferQueue::createBufferQueue(&producer, &consumer);
        mCaptureConsumer = new CpuConsumer(consumer, 1);
        mCaptureConsumer->setFrameAvailableListener(this);
        mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer"));
        mCaptureWindow = new Surface(producer);
    }

    // Since ashmem heaps are rounded up to page size, don't require the
    // capture heap to match the required JPEG buffer size exactly; only
    // reallocate if the heap is too small or more than HEAP_SLACK_FACTOR
    // times larger than needed.
    const size_t HEAP_SLACK_FACTOR = 2;
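    // For example (illustrative numbers only): with maxJpegSize of 300000
    // bytes, an existing heap is reused as long as its size falls within
    // [300000, 600000] bytes; anything outside that range triggers a
    // reallocation to exactly maxJpegSize below.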
    if (mCaptureHeap == 0 ||
            (mCaptureHeap->getSize() < static_cast<size_t>(maxJpegSize)) ||
            (mCaptureHeap->getSize() >
                    static_cast<size_t>(maxJpegSize) * HEAP_SLACK_FACTOR) ) {
        // Create memory for API consumption
        mCaptureHeap.clear();
        mCaptureHeap =
                new MemoryHeapBase(maxJpegSize, 0, "Camera2Client::CaptureHeap");
        if (mCaptureHeap->getSize() == 0) {
            ALOGE("%s: Camera %d: Unable to allocate memory for capture",
                    __FUNCTION__, mId);
            return NO_MEMORY;
        }
    }
    ALOGV("%s: Camera %d: JPEG capture heap now %zu bytes; requested %zd bytes",
            __FUNCTION__, mId, mCaptureHeap->getSize(), maxJpegSize);

    if (mCaptureStreamId != NO_STREAM) {
        // Check if stream parameters have to change
        uint32_t currentWidth, currentHeight;
        res = device->getStreamInfo(mCaptureStreamId,
                &currentWidth, &currentHeight, 0);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying capture output stream info: "
                    "%s (%d)", __FUNCTION__,
                    mId, strerror(-res), res);
            return res;
        }
        if (currentWidth != (uint32_t)params.pictureWidth ||
                currentHeight != (uint32_t)params.pictureHeight) {
            ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
                __FUNCTION__, mId, mCaptureStreamId);
            res = device->deleteStream(mCaptureStreamId);
            if (res == -EBUSY) {
                ALOGV("%s: Camera %d: Device is busy, call updateStream again "
                      "after it becomes idle", __FUNCTION__, mId);
                return res;
            } else if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete old output stream "
                        "for capture: %s (%d)", __FUNCTION__,
                        mId, strerror(-res), res);
                return res;
            }
            mCaptureStreamId = NO_STREAM;
        }
    }

    if (mCaptureStreamId == NO_STREAM) {
        // Create stream for HAL production
        res = device->createStream(mCaptureWindow,
                params.pictureWidth, params.pictureHeight,
                HAL_PIXEL_FORMAT_BLOB, &mCaptureStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create output stream for capture: "
                    "%s (%d)", __FUNCTION__, mId,
                    strerror(-res), res);
            return res;
        }
    }
    return OK;
}

status_t JpegProcessor::deleteStream() {
    ATRACE_CALL();

    Mutex::Autolock l(mInputMutex);

    if (mCaptureStreamId != NO_STREAM) {
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device == 0) {
            ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        device->deleteStream(mCaptureStreamId);

        mCaptureHeap.clear();
        mCaptureWindow.clear();
        mCaptureConsumer.clear();

        mCaptureStreamId = NO_STREAM;
    }
    return OK;
}

int JpegProcessor::getStreamId() const {
    Mutex::Autolock l(mInputMutex);
    return mCaptureStreamId;
}

void JpegProcessor::dump(int /*fd*/, const Vector<String16>& /*args*/) const {
}

bool JpegProcessor::threadLoop() {
    status_t res;

    {
        Mutex::Autolock l(mInputMutex);
        while (!mCaptureAvailable) {
            res = mCaptureAvailableSignal.waitRelative(mInputMutex,
                    kWaitDuration);
            if (res == TIMED_OUT) return true;
        }
        mCaptureAvailable = false;
    }

    do {
        res = processNewCapture();
    } while (res == OK);

    return true;
}

status_t JpegProcessor::processNewCapture() {
    ATRACE_CALL();
    status_t res;
    sp<Camera2Heap> captureHeap;
    sp<MemoryBase> captureBuffer;

    CpuConsumer::LockedBuffer imgBuffer;

    {
        Mutex::Autolock l(mInputMutex);
        if (mCaptureStreamId == NO_STREAM) {
            ALOGW("%s: Camera %d: No stream is available", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
        if (res != OK) {
            if (res != BAD_VALUE) {
                ALOGE("%s: Camera %d: Error receiving still image buffer: "
                        "%s (%d)", __FUNCTION__,
                        mId, strerror(-res), res);
            }
            return res;
        }

        ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
                mId);

        if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
            ALOGE("%s: Camera %d: Unexpected format for still image: "
                    "%x, expected %x", __FUNCTION__, mId,
                    imgBuffer.format,
                    HAL_PIXEL_FORMAT_BLOB);
            mCaptureConsumer->unlockBuffer(imgBuffer);
            return OK;
        }

        // Find size of JPEG image
        size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width);
        if (jpegSize == 0) { // failed to find size, default to whole buffer
            jpegSize = imgBuffer.width;
        }
        size_t heapSize = mCaptureHeap->getSize();
        if (jpegSize > heapSize) {
            ALOGW("%s: JPEG image is larger than expected, truncating "
                    "(got %zu, expected at most %zu bytes)",
                    __FUNCTION__, jpegSize, heapSize);
            jpegSize = heapSize;
        }

        // TODO: Optimize this to avoid memcopy
        captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize);
        void* captureMemory = mCaptureHeap->getBase();
        memcpy(captureMemory, imgBuffer.data, jpegSize);

        mCaptureConsumer->unlockBuffer(imgBuffer);
    }

    sp<CaptureSequencer> sequencer = mSequencer.promote();
    if (sequencer != 0) {
        sequencer->onCaptureAvailable(imgBuffer.timestamp, captureBuffer);
    }

    return OK;
}

/*
 * JPEG FILE FORMAT OVERVIEW.
 * http://www.jpeg.org/public/jfif.pdf
 * (JPEG is the image compression algorithm; the actual file format is called JFIF)
 *
 * "Markers" are 2-byte patterns used to distinguish parts of JFIF files.  The
 * first byte is always 0xFF, and the second byte is between 0x01 and 0xFE
 * (inclusive).  Because every marker begins with the same byte, they are
 * referred to by the second byte's value.
 *
 * JFIF files all begin with the Start of Image (SOI) marker, which is 0xD8.
 * Following it, "segment" sections begin with other markers, followed by a
 * 2-byte length (in network byte order), then the segment data.
 *
 * For our purposes we will ignore the data, and just use the length to skip to
 * the next segment.  This is necessary because the data inside segments is
 * allowed to contain the End of Image marker (0xFF 0xD9), preventing us from
 * naively scanning until the end.
 *
 * After all the segments are processed, the JPEG compressed image stream begins.
 * This can be considered an opaque format with one requirement: all 0xFF bytes
 * in this stream must be followed by a 0x00 byte.  This prevents any of the
 * image data from being interpreted as a segment.  The only exception to this
 * is at the end of the image stream, where there is an End of Image (EOI)
 * marker, which is 0xFF followed by a non-zero (0xD9) byte.
 */
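
/*
 * Illustrative sketch of the layout described above (not real image data;
 * everything other than the marker bytes is a placeholder):
 *
 *   FF D8                 SOI marker (start of the JFIF file)
 *   FF E0 00 10 ...       APP0 segment: marker, then a big-endian length of
 *                         0x0010 (16), which counts the two length bytes plus
 *                         14 bytes of segment data
 *   ...                   further segments, each skipped via its length field
 *   <entropy-coded data>  compressed stream; any literal 0xFF is followed by 0x00
 *   FF D9                 EOI marker (end of the image)
 *
 * findJpegSize() below walks this structure: skip segments by length, then
 * scan the compressed stream for the EOI marker.
 */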

const uint8_t MARK = 0xFF; // First byte of marker
const uint8_t SOI = 0xD8; // Start of Image
const uint8_t EOI = 0xD9; // End of Image
const size_t MARKER_LENGTH = 2; // length of a marker

#pragma pack(push)
#pragma pack(1)
typedef struct segment {
    uint8_t marker[MARKER_LENGTH];
    uint16_t length;
} segment_t;
#pragma pack(pop)

/* HELPER FUNCTIONS */

// check for Start of Image marker
bool checkJpegStart(uint8_t* buf) {
    return buf[0] == MARK && buf[1] == SOI;
}
// check for End of Image marker
bool checkJpegEnd(uint8_t *buf) {
    return buf[0] == MARK && buf[1] == EOI;
}
// check for arbitrary marker, returns marker type (second byte)
// returns 0 if no marker found. Note: 0x00 is not a valid marker type
uint8_t checkJpegMarker(uint8_t *buf) {
    if (buf[0] == MARK && buf[1] > 0 && buf[1] < 0xFF) {
        return buf[1];
    }
    return 0;
}

// Return the size of the JPEG, 0 indicates failure
size_t JpegProcessor::findJpegSize(uint8_t* jpegBuffer, size_t maxSize) {
    size_t size;

    // First check for JPEG transport header at the end of the buffer
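    // (The transport header is a camera2_jpeg_blob struct placed at the very
    // end of the fixed-size BLOB buffer by the JPEG producer; its jpeg_blob_id
    // field identifies it and its jpeg_size field gives the actual compressed
    // size. If it is present and consistent, the scan below can be skipped.)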
    uint8_t *header = jpegBuffer + (maxSize - sizeof(struct camera2_jpeg_blob));
    struct camera2_jpeg_blob *blob = (struct camera2_jpeg_blob*)(header);
    if (blob->jpeg_blob_id == CAMERA2_JPEG_BLOB_ID) {
        size = blob->jpeg_size;
        if (size > 0 && size <= maxSize - sizeof(struct camera2_jpeg_blob)) {
            // Verify SOI and EOI markers
            size_t offset = size - MARKER_LENGTH;
            uint8_t *end = jpegBuffer + offset;
            if (checkJpegStart(jpegBuffer) && checkJpegEnd(end)) {
                ALOGV("Found JPEG transport header, img size %zu", size);
                return size;
            } else {
                ALOGW("Found JPEG transport header with bad Image Start/End");
            }
        } else {
            ALOGW("Found JPEG transport header with bad size %zu", size);
        }
    }

    // Check Start of Image
    if ( !checkJpegStart(jpegBuffer) ) {
        ALOGE("Could not find start of JPEG marker");
        return 0;
    }

    // Read JFIF segment markers, skip over segment data
    size = 0;
    while (size <= maxSize - MARKER_LENGTH) {
        segment_t *segment = (segment_t*)(jpegBuffer + size);
        uint8_t type = checkJpegMarker(segment->marker);
        if (type == 0) { // invalid marker, no more segments, begin JPEG data
            ALOGV("JPEG stream found beginning at offset %zu", size);
            break;
        }
        if (type == EOI || size > maxSize - sizeof(segment_t)) {
            ALOGE("Got premature End before JPEG data, offset %zu", size);
            return 0;
        }
        size_t length = ntohs(segment->length);
        ALOGV("JFIF Segment, type %x length %zx", type, length);
        size += length + MARKER_LENGTH;
    }

    // Find End of Image
    // Scan JPEG buffer until End of Image (EOI)
    bool foundEnd = false;
    for ( ; size <= maxSize - MARKER_LENGTH; size++) {
        if ( checkJpegEnd(jpegBuffer + size) ) {
            foundEnd = true;
            size += MARKER_LENGTH;
            break;
        }
    }
    if (!foundEnd) {
        ALOGE("Could not find end of JPEG marker");
        return 0;
    }

    if (size > maxSize) {
        ALOGW("JPEG size %zu too large, reducing to maxSize %zu", size, maxSize);
        size = maxSize;
    }
    ALOGV("Final JPEG size %zu", size);
    return size;
}

}; // namespace camera2
}; // namespace android