1 /*
2 * Copyright (C) 2012-2018 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "Camera2-JpegProcessor"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20
21 #include <netinet/in.h>
22
23 #include <binder/MemoryBase.h>
24 #include <binder/MemoryHeapBase.h>
25 #include <utils/Log.h>
26 #include <utils/Trace.h>
27 #include <gui/Surface.h>
28
29 #include "common/CameraDeviceBase.h"
30 #include "api1/Camera2Client.h"
31 #include "api1/client2/Camera2Heap.h"
32 #include "api1/client2/CaptureSequencer.h"
33 #include "api1/client2/JpegProcessor.h"
34
35 namespace android {
36 namespace camera2 {
37
38 using android::camera3::CAMERA_STREAM_ROTATION_0;
39
// Construct the JPEG processor for one camera client.
// Caches the (weak) device and sequencer references and the camera id up
// front so later calls don't need to touch the client; no stream or heap is
// created until updateStream() runs.
JpegProcessor::JpegProcessor(
    sp<Camera2Client> client,
    wp<CaptureSequencer> sequencer):
        Thread(false),                       // not started in canCallJava mode
        mDevice(client->getCameraDevice()),  // weak: device may die first
        mSequencer(sequencer),               // notified when a capture lands
        mId(client->getCameraId()),
        mCaptureDone(false),
        mCaptureSuccess(false),
        mCaptureStreamId(NO_STREAM) {        // no output stream configured yet
}
51
// Tear down: delete the capture stream (and release the consumer, window,
// and heap) before the object goes away. deleteStream() is a no-op if no
// stream was ever created.
JpegProcessor::~JpegProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}
56
onFrameAvailable(const BufferItem &)57 void JpegProcessor::onFrameAvailable(const BufferItem& /*item*/) {
58 Mutex::Autolock l(mInputMutex);
59 ALOGV("%s", __FUNCTION__);
60 if (!mCaptureDone) {
61 mCaptureDone = true;
62 mCaptureSuccess = true;
63 mCaptureDoneSignal.signal();
64 }
65 }
66
updateStream(const Parameters & params)67 status_t JpegProcessor::updateStream(const Parameters ¶ms) {
68 ATRACE_CALL();
69 ALOGV("%s", __FUNCTION__);
70 status_t res;
71
72 Mutex::Autolock l(mInputMutex);
73
74 sp<CameraDeviceBase> device = mDevice.promote();
75 if (device == 0) {
76 ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
77 return INVALID_OPERATION;
78 }
79
80 // Find out buffer size for JPEG
81 ssize_t maxJpegSize = device->getJpegBufferSize(params.pictureWidth, params.pictureHeight);
82 if (maxJpegSize <= 0) {
83 ALOGE("%s: Camera %d: Jpeg buffer size (%zu) is invalid ",
84 __FUNCTION__, mId, maxJpegSize);
85 return INVALID_OPERATION;
86 }
87
88 if (mCaptureConsumer == 0) {
89 // Create CPU buffer queue endpoint
90 sp<IGraphicBufferProducer> producer;
91 sp<IGraphicBufferConsumer> consumer;
92 BufferQueue::createBufferQueue(&producer, &consumer);
93 mCaptureConsumer = new CpuConsumer(consumer, 1);
94 mCaptureConsumer->setFrameAvailableListener(this);
95 mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
96 mCaptureWindow = new Surface(producer);
97 }
98
99 // Since ashmem heaps are rounded up to page size, don't reallocate if
100 // the capture heap isn't exactly the same size as the required JPEG buffer
101 const size_t HEAP_SLACK_FACTOR = 2;
102 if (mCaptureHeap == 0 ||
103 (mCaptureHeap->getSize() < static_cast<size_t>(maxJpegSize)) ||
104 (mCaptureHeap->getSize() >
105 static_cast<size_t>(maxJpegSize) * HEAP_SLACK_FACTOR) ) {
106 // Create memory for API consumption
107 mCaptureHeap.clear();
108 mCaptureHeap =
109 new MemoryHeapBase(maxJpegSize, 0, "Camera2Client::CaptureHeap");
110 if (mCaptureHeap->getSize() == 0) {
111 ALOGE("%s: Camera %d: Unable to allocate memory for capture",
112 __FUNCTION__, mId);
113 return NO_MEMORY;
114 }
115 }
116 ALOGV("%s: Camera %d: JPEG capture heap now %zu bytes; requested %zd bytes",
117 __FUNCTION__, mId, mCaptureHeap->getSize(), maxJpegSize);
118
119 if (mCaptureStreamId != NO_STREAM) {
120 // Check if stream parameters have to change
121 CameraDeviceBase::StreamInfo streamInfo;
122 res = device->getStreamInfo(mCaptureStreamId, &streamInfo);
123 if (res != OK) {
124 ALOGE("%s: Camera %d: Error querying capture output stream info: "
125 "%s (%d)", __FUNCTION__,
126 mId, strerror(-res), res);
127 return res;
128 }
129 if (streamInfo.width != (uint32_t)params.pictureWidth ||
130 streamInfo.height != (uint32_t)params.pictureHeight) {
131 ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
132 __FUNCTION__, mId, mCaptureStreamId);
133 res = device->deleteStream(mCaptureStreamId);
134 if (res == -EBUSY) {
135 ALOGV("%s: Camera %d: Device is busy, call updateStream again "
136 " after it becomes idle", __FUNCTION__, mId);
137 return res;
138 } else if (res != OK) {
139 ALOGE("%s: Camera %d: Unable to delete old output stream "
140 "for capture: %s (%d)", __FUNCTION__,
141 mId, strerror(-res), res);
142 return res;
143 }
144 mCaptureStreamId = NO_STREAM;
145 }
146 }
147
148 if (mCaptureStreamId == NO_STREAM) {
149 // Create stream for HAL production
150 res = device->createStream(mCaptureWindow,
151 params.pictureWidth, params.pictureHeight,
152 HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_V0_JFIF,
153 CAMERA_STREAM_ROTATION_0, &mCaptureStreamId,
154 String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
155 if (res != OK) {
156 ALOGE("%s: Camera %d: Can't create output stream for capture: "
157 "%s (%d)", __FUNCTION__, mId,
158 strerror(-res), res);
159 return res;
160 }
161 }
162 return OK;
163 }
164
deleteStream()165 status_t JpegProcessor::deleteStream() {
166 ATRACE_CALL();
167
168 Mutex::Autolock l(mInputMutex);
169
170 if (mCaptureStreamId != NO_STREAM) {
171 sp<CameraDeviceBase> device = mDevice.promote();
172 if (device == 0) {
173 ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
174 return INVALID_OPERATION;
175 }
176
177 status_t res = device->deleteStream(mCaptureStreamId);
178 if (res != OK) {
179 ALOGE("%s: delete stream %d failed!", __FUNCTION__, mCaptureStreamId);
180 return res;
181 }
182
183 mCaptureHeap.clear();
184 mCaptureWindow.clear();
185 mCaptureConsumer.clear();
186
187 mCaptureStreamId = NO_STREAM;
188 }
189 return OK;
190 }
191
getStreamId() const192 int JpegProcessor::getStreamId() const {
193 Mutex::Autolock l(mInputMutex);
194 return mCaptureStreamId;
195 }
196
// Intentionally a no-op: JpegProcessor has no state worth dumping, but the
// method is kept to satisfy the common processor dump interface.
void JpegProcessor::dump(int /*fd*/, const Vector<String16>& /*args*/) const {
}
199
threadLoop()200 bool JpegProcessor::threadLoop() {
201 status_t res;
202
203 bool captureSuccess = false;
204 {
205 Mutex::Autolock l(mInputMutex);
206
207 while (!mCaptureDone) {
208 res = mCaptureDoneSignal.waitRelative(mInputMutex,
209 kWaitDuration);
210 if (res == TIMED_OUT) return true;
211 }
212
213 captureSuccess = mCaptureSuccess;
214 mCaptureDone = false;
215 }
216
217 res = processNewCapture(captureSuccess);
218
219 return true;
220 }
221
processNewCapture(bool captureSuccess)222 status_t JpegProcessor::processNewCapture(bool captureSuccess) {
223 ATRACE_CALL();
224 status_t res;
225 sp<Camera2Heap> captureHeap;
226 sp<MemoryBase> captureBuffer;
227
228 CpuConsumer::LockedBuffer imgBuffer;
229
230 if (captureSuccess) {
231 Mutex::Autolock l(mInputMutex);
232 if (mCaptureStreamId == NO_STREAM) {
233 ALOGW("%s: Camera %d: No stream is available", __FUNCTION__, mId);
234 return INVALID_OPERATION;
235 }
236
237 res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
238 if (res != OK) {
239 if (res != BAD_VALUE) {
240 ALOGE("%s: Camera %d: Error receiving still image buffer: "
241 "%s (%d)", __FUNCTION__,
242 mId, strerror(-res), res);
243 }
244 return res;
245 }
246
247 ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
248 mId);
249
250 if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
251 ALOGE("%s: Camera %d: Unexpected format for still image: "
252 "%x, expected %x", __FUNCTION__, mId,
253 imgBuffer.format,
254 HAL_PIXEL_FORMAT_BLOB);
255 mCaptureConsumer->unlockBuffer(imgBuffer);
256 return OK;
257 }
258
259 // Find size of JPEG image
260 size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width);
261 if (jpegSize == 0) { // failed to find size, default to whole buffer
262 jpegSize = imgBuffer.width;
263 }
264 size_t heapSize = mCaptureHeap->getSize();
265 if (jpegSize > heapSize) {
266 ALOGW("%s: JPEG image is larger than expected, truncating "
267 "(got %zu, expected at most %zu bytes)",
268 __FUNCTION__, jpegSize, heapSize);
269 jpegSize = heapSize;
270 }
271
272 // TODO: Optimize this to avoid memcopy
273 captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize);
274 void* captureMemory = mCaptureHeap->getBase();
275 memcpy(captureMemory, imgBuffer.data, jpegSize);
276
277 mCaptureConsumer->unlockBuffer(imgBuffer);
278 }
279
280 sp<CaptureSequencer> sequencer = mSequencer.promote();
281 if (sequencer != 0) {
282 sequencer->onCaptureAvailable(imgBuffer.timestamp, captureBuffer, !captureSuccess);
283 }
284
285 return OK;
286 }
287
288 /*
289 * JPEG FILE FORMAT OVERVIEW.
290 * http://www.jpeg.org/public/jfif.pdf
291 * (JPEG is the image compression algorithm, actual file format is called JFIF)
292 *
293 * "Markers" are 2-byte patterns used to distinguish parts of JFIF files. The
294 * first byte is always 0xFF, and the second byte is between 0x01 and 0xFE
295 * (inclusive). Because every marker begins with the same byte, they are
296 * referred to by the second byte's value.
297 *
298 * JFIF files all begin with the Start of Image (SOI) marker, which is 0xD8.
299 * Following it, "segment" sections begin with other markers, followed by a
300 * 2-byte length (in network byte order), then the segment data.
301 *
302 * For our purposes we will ignore the data, and just use the length to skip to
303 * the next segment. This is necessary because the data inside segments are
304 * allowed to contain the End of Image marker (0xFF 0xD9), preventing us from
 * naively scanning until the end.
306 *
307 * After all the segments are processed, the jpeg compressed image stream begins.
308 * This can be considered an opaque format with one requirement: all 0xFF bytes
309 * in this stream must be followed with a 0x00 byte. This prevents any of the
310 * image data to be interpreted as a segment. The only exception to this is at
311 * the end of the image stream there is an End of Image (EOI) marker, which is
312 * 0xFF followed by a non-zero (0xD9) byte.
313 */
314
// JFIF marker constants (see the format overview above).
const uint8_t MARK = 0xFF; // First byte of every marker
const uint8_t SOI = 0xD8;  // Start of Image marker type
const uint8_t EOI = 0xD9;  // End of Image marker type
const size_t MARKER_LENGTH = 2; // bytes per marker

// Packed layout of a JFIF segment header: 2-byte marker followed by a
// 2-byte big-endian length. Packing is required so it can be overlaid
// directly onto the byte stream.
#pragma pack(push)
#pragma pack(1)
typedef struct segment {
    uint8_t marker[MARKER_LENGTH];
    uint16_t length;
} segment_t;
#pragma pack(pop)

/* HELPER FUNCTIONS */

// Does buf point at a Start of Image marker (0xFF 0xD8)?
bool checkJpegStart(uint8_t* buf) {
    return (buf[0] == MARK) && (buf[1] == SOI);
}
// Does buf point at an End of Image marker (0xFF 0xD9)?
bool checkJpegEnd(uint8_t *buf) {
    return (buf[0] == MARK) && (buf[1] == EOI);
}
// Identify an arbitrary marker: returns its type byte (0x01..0xFE), or 0
// when buf does not hold a valid marker (0x00 is never a valid type, and
// 0xFF 0xFF is fill, not a marker).
uint8_t checkJpegMarker(uint8_t *buf) {
    if (buf[0] != MARK) {
        return 0;
    }
    const uint8_t type = buf[1];
    return (type != 0x00 && type != MARK) ? type : 0;
}
346
347 // Return the size of the JPEG, 0 indicates failure
// Return the size of the JPEG, 0 indicates failure
//
// Strategy, in order of preference:
//   1. Trust the camera2 JPEG transport header that HALs append at the very
//      end of the BLOB buffer, if present and self-consistent.
//   2. Otherwise parse the JFIF structure: verify SOI, walk the segment
//      headers, then linearly scan the entropy-coded stream for EOI.
size_t JpegProcessor::findJpegSize(uint8_t* jpegBuffer, size_t maxSize) {
    size_t size;

    // First check for JPEG transport header at the end of the buffer
    uint8_t *header = jpegBuffer + (maxSize - sizeof(struct camera2_jpeg_blob));
    struct camera2_jpeg_blob *blob = (struct camera2_jpeg_blob*)(header);
    if (blob->jpeg_blob_id == CAMERA2_JPEG_BLOB_ID) {
        size = blob->jpeg_size;
        if (size > 0 && size <= maxSize - sizeof(struct camera2_jpeg_blob)) {
            // Verify SOI and EOI markers
            // NOTE(review): if size < MARKER_LENGTH this offset underflows
            // and `end` points out of bounds — presumably no HAL reports a
            // 1-byte JPEG, but worth confirming/guarding.
            size_t offset = size - MARKER_LENGTH;
            uint8_t *end = jpegBuffer + offset;
            if (checkJpegStart(jpegBuffer) && checkJpegEnd(end)) {
                ALOGV("Found JPEG transport header, img size %zu", size);
                return size;
            } else {
                ALOGW("Found JPEG transport header with bad Image Start/End");
            }
        } else {
            ALOGW("Found JPEG transport header with bad size %zu", size);
        }
    }

    // Check Start of Image
    if ( !checkJpegStart(jpegBuffer) ) {
        ALOGE("Could not find start of JPEG marker");
        return 0;
    }

    // Read JFIF segment markers, skip over segment data
    size = MARKER_LENGTH; //jump SOI;
    while (size <= maxSize - MARKER_LENGTH) {
        segment_t *segment = (segment_t*)(jpegBuffer + size);
        uint8_t type = checkJpegMarker(segment->marker);
        if (type == 0) { // invalid marker, no more segments, begin JPEG data
            ALOGV("JPEG stream found beginning at offset %zu", size);
            break;
        }
        // Bail if EOI appears before any image data, or if reading the
        // 2-byte length field would run past the end of the buffer.
        if (type == EOI || size > maxSize - sizeof(segment_t)) {
            ALOGE("Got premature End before JPEG data, offset %zu", size);
            return 0;
        }
        // Segment length is big-endian and includes the length field itself
        // but not the marker, hence the extra MARKER_LENGTH below.
        size_t length = ntohs(segment->length);
        ALOGV("JFIF Segment, type %x length %zx", type, length);
        size += length + MARKER_LENGTH;
    }

    // Find End of Image
    // Scan JPEG buffer until End of Image (EOI)
    bool foundEnd = false;
    for ( ; size <= maxSize - MARKER_LENGTH; size++) {
        if ( checkJpegEnd(jpegBuffer + size) ) {
            foundEnd = true;
            size += MARKER_LENGTH;
            break;
        }
    }
    if (!foundEnd) {
        ALOGE("Could not find end of JPEG marker");
        return 0;
    }

    // Defensive clamp; should not trigger given the loop bounds above.
    if (size > maxSize) {
        ALOGW("JPEG size %zu too large, reducing to maxSize %zu", size, maxSize);
        size = maxSize;
    }
    ALOGV("Final JPEG size %zu", size);
    return size;
}
417
418 }; // namespace camera2
419 }; // namespace android
420