/*
 * Copyright (C) 2012-2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-JpegProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <netinet/in.h>

#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <gui/Surface.h>

#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
#include "api1/client2/Camera2Heap.h"
#include "api1/client2/CaptureSequencer.h"
#include "api1/client2/JpegProcessor.h"

namespace android {
namespace camera2 {

JpegProcessor::JpegProcessor(
        sp<Camera2Client> client,
        wp<CaptureSequencer> sequencer):
        Thread(false),
        mDevice(client->getCameraDevice()),
        mSequencer(sequencer),
        mId(client->getCameraId()),
        mCaptureDone(false),
        mCaptureSuccess(false),
        mCaptureStreamId(NO_STREAM) {
}

JpegProcessor::~JpegProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}

void JpegProcessor::onFrameAvailable(const BufferItem& /*item*/) {
    Mutex::Autolock l(mInputMutex);
    ALOGV("%s", __FUNCTION__);
    if (!mCaptureDone) {
        mCaptureDone = true;
        mCaptureSuccess = true;
        mCaptureDoneSignal.signal();
    }
}

status_t JpegProcessor::updateStream(const Parameters &params) {
    ATRACE_CALL();
    ALOGV("%s", __FUNCTION__);
    status_t res;

    Mutex::Autolock l(mInputMutex);

    sp<CameraDeviceBase> device = mDevice.promote();
    if (device == 0) {
        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    // Find out buffer size for JPEG
    ssize_t maxJpegSize = device->getJpegBufferSize(params.pictureWidth, params.pictureHeight);
    if (maxJpegSize <= 0) {
        ALOGE("%s: Camera %d: Jpeg buffer size (%zd) is invalid ",
                __FUNCTION__, mId, maxJpegSize);
        return INVALID_OPERATION;
    }

    if (mCaptureConsumer == 0) {
        // Create CPU buffer queue endpoint
        sp<IGraphicBufferProducer> producer;
        sp<IGraphicBufferConsumer> consumer;
        BufferQueue::createBufferQueue(&producer, &consumer);
        mCaptureConsumer = new CpuConsumer(consumer, 1);
        mCaptureConsumer->setFrameAvailableListener(this);
        mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
        mCaptureWindow = new Surface(producer);
    }

    // Since ashmem heaps are rounded up to page size, don't require the
    // capture heap to be exactly the same size as the required JPEG buffer;
    // only reallocate it if it is too small, or more than HEAP_SLACK_FACTOR
    // times larger than needed.
    const size_t HEAP_SLACK_FACTOR = 2;
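    // For example, with a 1 MiB maxJpegSize an existing heap anywhere between
    // 1 MiB and 2 MiB is reused; a smaller or larger heap is released and
    // reallocated below.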
    if (mCaptureHeap == 0 ||
            (mCaptureHeap->getSize() < static_cast<size_t>(maxJpegSize)) ||
            (mCaptureHeap->getSize() >
                    static_cast<size_t>(maxJpegSize) * HEAP_SLACK_FACTOR) ) {
        // Create memory for API consumption
        mCaptureHeap.clear();
        mCaptureHeap =
                new MemoryHeapBase(maxJpegSize, 0, "Camera2Client::CaptureHeap");
        if (mCaptureHeap->getSize() == 0) {
            ALOGE("%s: Camera %d: Unable to allocate memory for capture",
                    __FUNCTION__, mId);
            return NO_MEMORY;
        }
    }
    ALOGV("%s: Camera %d: JPEG capture heap now %zu bytes; requested %zd bytes",
            __FUNCTION__, mId, mCaptureHeap->getSize(), maxJpegSize);

    if (mCaptureStreamId != NO_STREAM) {
        // Check if stream parameters have to change
        CameraDeviceBase::StreamInfo streamInfo;
        res = device->getStreamInfo(mCaptureStreamId, &streamInfo);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying capture output stream info: "
                    "%s (%d)", __FUNCTION__,
                    mId, strerror(-res), res);
            return res;
        }
        if (streamInfo.width != (uint32_t)params.pictureWidth ||
                streamInfo.height != (uint32_t)params.pictureHeight) {
            ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
                    __FUNCTION__, mId, mCaptureStreamId);
            res = device->deleteStream(mCaptureStreamId);
            if (res == -EBUSY) {
                ALOGV("%s: Camera %d: Device is busy, call updateStream again "
                        " after it becomes idle", __FUNCTION__, mId);
                return res;
            } else if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete old output stream "
                        "for capture: %s (%d)", __FUNCTION__,
                        mId, strerror(-res), res);
                return res;
            }
            mCaptureStreamId = NO_STREAM;
        }
    }

    if (mCaptureStreamId == NO_STREAM) {
        // Create stream for HAL production
        res = device->createStream(mCaptureWindow,
                params.pictureWidth, params.pictureHeight,
                HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_V0_JFIF,
                CAMERA3_STREAM_ROTATION_0, &mCaptureStreamId,
                String8());
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create output stream for capture: "
                    "%s (%d)", __FUNCTION__, mId,
                    strerror(-res), res);
            return res;
        }
    }
    return OK;
}

status_t JpegProcessor::deleteStream() {
    ATRACE_CALL();

    Mutex::Autolock l(mInputMutex);

    if (mCaptureStreamId != NO_STREAM) {
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device == 0) {
            ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        status_t res = device->deleteStream(mCaptureStreamId);
        if (res != OK) {
            ALOGE("%s: delete stream %d failed!", __FUNCTION__, mCaptureStreamId);
            return res;
        }

        mCaptureHeap.clear();
        mCaptureWindow.clear();
        mCaptureConsumer.clear();

        mCaptureStreamId = NO_STREAM;
    }
    return OK;
}

int JpegProcessor::getStreamId() const {
    Mutex::Autolock l(mInputMutex);
    return mCaptureStreamId;
}

void JpegProcessor::dump(int /*fd*/, const Vector<String16>& /*args*/) const {
}

bool JpegProcessor::threadLoop() {
    status_t res;

    bool captureSuccess = false;
    {
        Mutex::Autolock l(mInputMutex);

        while (!mCaptureDone) {
            res = mCaptureDoneSignal.waitRelative(mInputMutex,
                    kWaitDuration);
            if (res == TIMED_OUT) return true;
        }

        captureSuccess = mCaptureSuccess;
        mCaptureDone = false;
    }

    res = processNewCapture(captureSuccess);

    return true;
}

status_t JpegProcessor::processNewCapture(bool captureSuccess) {
    ATRACE_CALL();
    status_t res;
    sp<Camera2Heap> captureHeap;
    sp<MemoryBase> captureBuffer;

    CpuConsumer::LockedBuffer imgBuffer;

    if (captureSuccess) {
        Mutex::Autolock l(mInputMutex);
        if (mCaptureStreamId == NO_STREAM) {
            ALOGW("%s: Camera %d: No stream is available", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
        if (res != OK) {
            if (res != BAD_VALUE) {
                ALOGE("%s: Camera %d: Error receiving still image buffer: "
                        "%s (%d)", __FUNCTION__,
                        mId, strerror(-res), res);
            }
            return res;
        }

        ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
                mId);

        if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
            ALOGE("%s: Camera %d: Unexpected format for still image: "
                    "%x, expected %x", __FUNCTION__, mId,
                    imgBuffer.format,
                    HAL_PIXEL_FORMAT_BLOB);
            mCaptureConsumer->unlockBuffer(imgBuffer);
            return OK;
        }

        // Find size of JPEG image
        size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width);
        if (jpegSize == 0) { // failed to find size, default to whole buffer
            jpegSize = imgBuffer.width;
        }
        size_t heapSize = mCaptureHeap->getSize();
        if (jpegSize > heapSize) {
            ALOGW("%s: JPEG image is larger than expected, truncating "
                    "(got %zu, expected at most %zu bytes)",
                    __FUNCTION__, jpegSize, heapSize);
            jpegSize = heapSize;
        }

        // TODO: Optimize this to avoid memcopy
        captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize);
        void* captureMemory = mCaptureHeap->getBase();
        memcpy(captureMemory, imgBuffer.data, jpegSize);

        mCaptureConsumer->unlockBuffer(imgBuffer);
    }

    sp<CaptureSequencer> sequencer = mSequencer.promote();
    if (sequencer != 0) {
        sequencer->onCaptureAvailable(imgBuffer.timestamp, captureBuffer, !captureSuccess);
    }

    return OK;
}

/*
 * JPEG FILE FORMAT OVERVIEW.
 * http://www.jpeg.org/public/jfif.pdf
 * (JPEG is the image compression algorithm; the actual file format is called JFIF)
 *
 * "Markers" are 2-byte patterns used to distinguish parts of JFIF files. The
 * first byte is always 0xFF, and the second byte is between 0x01 and 0xFE
 * (inclusive). Because every marker begins with the same byte, they are
 * referred to by the second byte's value.
 *
 * JFIF files all begin with the Start of Image (SOI) marker, which is 0xD8.
 * Following it, "segment" sections begin with other markers, followed by a
 * 2-byte length (in network byte order), then the segment data.
 *
 * For our purposes we will ignore the data, and just use the length to skip to
 * the next segment. This is necessary because the data inside segments is
 * allowed to contain the End of Image marker (0xFF 0xD9), preventing us from
 * naively scanning until the end.
 *
 * After all the segments are processed, the JPEG-compressed image stream begins.
 * This can be considered an opaque format with one requirement: every 0xFF byte
 * in this stream must be followed by a 0x00 byte. This prevents any of the
 * image data from being interpreted as a segment. The only exception is at the
 * end of the image stream, where there is an End of Image (EOI) marker: 0xFF
 * followed by a non-zero (0xD9) byte.
 */

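/*
 * As a concrete illustration (a typical layout; the exact segments and lengths
 * vary by encoder), the first bytes of a JFIF file might look like:
 *
 *   FF D8              SOI marker - start of image, no length field
 *   FF E0 00 10 ...    APP0 segment: marker 0xE0, length 0x0010 = 16 bytes
 *                      (the length counts its own two bytes but not the
 *                      marker), followed by 14 bytes of JFIF header data
 *   FF DB 00 43 ...    DQT segment (quantization table), length 0x43
 *   ...                more segments, then the entropy-coded image stream,
 *                      where every 0xFF data byte is escaped as FF 00
 *   FF D9              EOI marker - end of image
 */
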
const uint8_t MARK = 0xFF; // First byte of marker
const uint8_t SOI = 0xD8; // Start of Image
const uint8_t EOI = 0xD9; // End of Image
const size_t MARKER_LENGTH = 2; // length of a marker

#pragma pack(push)
#pragma pack(1)
typedef struct segment {
    uint8_t marker[MARKER_LENGTH];
    uint16_t length;
} segment_t;
#pragma pack(pop)

/* HELPER FUNCTIONS */

// check for Start of Image marker
bool checkJpegStart(uint8_t* buf) {
    return buf[0] == MARK && buf[1] == SOI;
}
// check for End of Image marker
bool checkJpegEnd(uint8_t *buf) {
    return buf[0] == MARK && buf[1] == EOI;
}
// check for arbitrary marker, returns marker type (second byte)
// returns 0 if no marker found. Note: 0x00 is not a valid marker type
uint8_t checkJpegMarker(uint8_t *buf) {
    if (buf[0] == MARK && buf[1] > 0 && buf[1] < 0xFF) {
        return buf[1];
    }
    return 0;
}

// Return the size of the JPEG, 0 indicates failure
size_t JpegProcessor::findJpegSize(uint8_t* jpegBuffer, size_t maxSize) {
    size_t size;

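    // Buffer layout assumed here (this follows the camera HAL BLOB stream
    // convention): the HAL writes the JPEG starting at offset 0 and appends a
    // camera2_jpeg_blob struct at the very end of the fixed-size buffer:
    //
    //   [ JPEG data (jpeg_size bytes) | unused padding | camera2_jpeg_blob ]
    //     offset 0                                       maxSize - sizeof(blob)
    //
    // When the blob ID matches, jpeg_size gives the size directly; the marker
    // scan further below is only a fallback.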
    // First check for JPEG transport header at the end of the buffer
    uint8_t *header = jpegBuffer + (maxSize - sizeof(struct camera2_jpeg_blob));
    struct camera2_jpeg_blob *blob = (struct camera2_jpeg_blob*)(header);
    if (blob->jpeg_blob_id == CAMERA2_JPEG_BLOB_ID) {
        size = blob->jpeg_size;
        if (size > 0 && size <= maxSize - sizeof(struct camera2_jpeg_blob)) {
            // Verify SOI and EOI markers
            size_t offset = size - MARKER_LENGTH;
            uint8_t *end = jpegBuffer + offset;
            if (checkJpegStart(jpegBuffer) && checkJpegEnd(end)) {
                ALOGV("Found JPEG transport header, img size %zu", size);
                return size;
            } else {
                ALOGW("Found JPEG transport header with bad Image Start/End");
            }
        } else {
            ALOGW("Found JPEG transport header with bad size %zu", size);
        }
    }

    // Check Start of Image
    if ( !checkJpegStart(jpegBuffer) ) {
        ALOGE("Could not find start of JPEG marker");
        return 0;
    }

    // Read JFIF segment markers, skip over segment data
    size = MARKER_LENGTH; // skip past the SOI marker
    while (size <= maxSize - MARKER_LENGTH) {
        segment_t *segment = (segment_t*)(jpegBuffer + size);
        uint8_t type = checkJpegMarker(segment->marker);
        if (type == 0) { // invalid marker, no more segments, begin JPEG data
            ALOGV("JPEG stream found beginning at offset %zu", size);
            break;
        }
        if (type == EOI || size > maxSize - sizeof(segment_t)) {
            ALOGE("Got premature End before JPEG data, offset %zu", size);
            return 0;
        }
        size_t length = ntohs(segment->length);
        ALOGV("JFIF Segment, type %x length %zx", type, length);
        size += length + MARKER_LENGTH;
    }

    // Find End of Image
    // Scan JPEG buffer until End of Image (EOI)
    bool foundEnd = false;
    for ( ; size <= maxSize - MARKER_LENGTH; size++) {
        if ( checkJpegEnd(jpegBuffer + size) ) {
            foundEnd = true;
            size += MARKER_LENGTH;
            break;
        }
    }
    if (!foundEnd) {
        ALOGE("Could not find end of JPEG marker");
        return 0;
    }

    if (size > maxSize) {
        ALOGW("JPEG size %zu too large, reducing to maxSize %zu", size, maxSize);
        size = maxSize;
    }
    ALOGV("Final JPEG size %zu", size);
    return size;
}

}; // namespace camera2
}; // namespace android