/*
 * Copyright (C) 2013-2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-OutputStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <ctime>
#include <fstream>

#include <android-base/unique_fd.h>
#include <ui/GraphicBuffer.h>
#include <utils/Log.h>
#include <utils/Trace.h>

#include "api1/client2/JpegProcessor.h"
#include "Camera3OutputStream.h"
#include "utils/TraceHFR.h"

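// container_of recovers a pointer to the enclosing ANativeWindowBuffer from a
// pointer to its 'handle' member; returnBufferCheckedLocked() uses it to map a
// camera_stream_buffer's buffer handle back to the buffer owned by the window.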
#ifndef container_of
#define container_of(ptr, type, member) \
    (type *)((char*)(ptr) - offsetof(type, member))
#endif

namespace android {

namespace camera3 {

Camera3OutputStream::Camera3OutputStream(int id,
        sp<Surface> consumer,
        uint32_t width, uint32_t height, int format,
        android_dataspace dataSpace, camera_stream_rotation_t rotation,
        nsecs_t timestampOffset, const String8& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        int setId, bool isMultiResolution) :
        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                            /*maxSize*/0, format, dataSpace, rotation,
                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
        mConsumer(consumer),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mConsumerUsage(0),
        mDropBuffers(false),
        mDequeueBufferLatency(kDequeueLatencyBinSize) {

    if (mConsumer == NULL) {
        ALOGE("%s: Consumer is NULL!", __FUNCTION__);
        mState = STATE_ERROR;
    }

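    // Release notifications from the producer listener are only needed when the
    // stream belongs to a valid stream set, i.e. when a Camera3BufferManager may
    // be tracking its buffers.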
    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

Camera3OutputStream::Camera3OutputStream(int id,
        sp<Surface> consumer,
        uint32_t width, uint32_t height, size_t maxSize, int format,
        android_dataspace dataSpace, camera_stream_rotation_t rotation,
        nsecs_t timestampOffset, const String8& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        int setId, bool isMultiResolution) :
        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
                            format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
                            setId, isMultiResolution),
        mConsumer(consumer),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseMonoTimestamp(false),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mConsumerUsage(0),
        mDropBuffers(false),
        mDequeueBufferLatency(kDequeueLatencyBinSize) {

    if (format != HAL_PIXEL_FORMAT_BLOB && format != HAL_PIXEL_FORMAT_RAW_OPAQUE) {
        ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__,
                format);
        mState = STATE_ERROR;
    }

    if (mConsumer == NULL) {
        ALOGE("%s: Consumer is NULL!", __FUNCTION__);
        mState = STATE_ERROR;
    }

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

Camera3OutputStream::Camera3OutputStream(int id,
        uint32_t width, uint32_t height, int format,
        uint64_t consumerUsage, android_dataspace dataSpace,
        camera_stream_rotation_t rotation, nsecs_t timestampOffset,
        const String8& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        int setId, bool isMultiResolution) :
        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                            /*maxSize*/0, format, dataSpace, rotation,
                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
        mConsumer(nullptr),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mConsumerUsage(consumerUsage),
        mDropBuffers(false),
        mDequeueBufferLatency(kDequeueLatencyBinSize) {
    // Deferred consumers only support the preview surface format for now.
    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        ALOGE("%s: Deferred consumer only supports IMPLEMENTATION_DEFINED format now!",
                __FUNCTION__);
        mState = STATE_ERROR;
    }

    // Validation check for the consumer usage flag.
    if ((consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) == 0 &&
            (consumerUsage & GraphicBuffer::USAGE_HW_COMPOSER) == 0) {
        ALOGE("%s: Deferred consumer usage flag is illegal %" PRIu64 "!",
                __FUNCTION__, consumerUsage);
        mState = STATE_ERROR;
    }

    mConsumerName = String8("Deferred");
    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

Camera3OutputStream::Camera3OutputStream(int id, camera_stream_type_t type,
        uint32_t width, uint32_t height,
        int format,
        android_dataspace dataSpace,
        camera_stream_rotation_t rotation,
        const String8& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        uint64_t consumerUsage, nsecs_t timestampOffset,
        int setId, bool isMultiResolution) :
        Camera3IOStreamBase(id, type, width, height,
                            /*maxSize*/0,
                            format, dataSpace, rotation,
                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseMonoTimestamp(false),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mConsumerUsage(consumerUsage),
        mDropBuffers(false),
        mDequeueBufferLatency(kDequeueLatencyBinSize) {

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);

    // Subclasses expected to initialize mConsumer themselves
}


Camera3OutputStream::~Camera3OutputStream() {
    disconnectLocked();
}

status_t Camera3OutputStream::getBufferLocked(camera_stream_buffer *buffer,
        const std::vector<size_t>&) {
    ATRACE_HFR_CALL();

    ANativeWindowBuffer* anb;
    int fenceFd = -1;

    status_t res;
    res = getBufferLockedCommon(&anb, &fenceFd);
    if (res != OK) {
        return res;
    }

    /**
     * FenceFD now owned by HAL except in case of error,
     * in which case we reassign it to acquire_fence
     */
    handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd,
                        /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/true);

    return OK;
}

status_t Camera3OutputStream::getBuffersLocked(std::vector<OutstandingBuffer>* outBuffers) {
    status_t res;

    if ((res = getBufferPreconditionCheckLocked()) != OK) {
        return res;
    }

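    // Batch dequeueing bypasses the buffer manager's per-buffer bookkeeping,
    // so the two modes are mutually exclusive (setBatchSize() rejects buffer
    // manager streams for the same reason).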
    if (mUseBufferManager) {
        ALOGE("%s: stream %d is managed by buffer manager and does not support batch operation",
                __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    sp<Surface> consumer = mConsumer;
    /**
     * Release the lock briefly to avoid deadlock in the below scenario:
     * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
     *     This thread holds the StreamingProcessor lock and tries to lock the Camera3Stream lock.
     * Thread 2: Camera3Stream::returnBuffer -> StreamingProcessor::onFrameAvailable().
     *     This thread holds the Camera3Stream lock and the bufferQueue lock, and tries to lock
     *     the StreamingProcessor lock.
     * Thread 3: Camera3Stream::getBuffer(). This thread holds the Camera3Stream lock
     *     and tries to lock the bufferQueue lock.
     * That creates a circular locking dependency.
     */
    mLock.unlock();

    size_t numBuffersRequested = outBuffers->size();
    std::vector<Surface::BatchBuffer> buffers(numBuffersRequested);

    nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);
    res = consumer->dequeueBuffers(&buffers);
    nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
    mDequeueBufferLatency.add(dequeueStart, dequeueEnd);

    mLock.lock();

    if (res != OK) {
        if (shouldLogError(res, mState)) {
            ALOGE("%s: Stream %d: Can't dequeue %zu output buffers: %s (%d)",
                    __FUNCTION__, mId, numBuffersRequested, strerror(-res), res);
        }
        checkRetAndSetAbandonedLocked(res);
        return res;
    }
    checkRemovedBuffersLocked();

    /**
     * FenceFD now owned by HAL except in case of error,
     * in which case we reassign it to acquire_fence
     */
    for (size_t i = 0; i < numBuffersRequested; i++) {
        handoutBufferLocked(*(outBuffers->at(i).outBuffer),
                &(buffers[i].buffer->handle), /*acquireFence*/buffers[i].fenceFd,
                /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/true);
    }
    return OK;
}

status_t Camera3OutputStream::queueBufferToConsumer(sp<ANativeWindow>& consumer,
        ANativeWindowBuffer* buffer, int anwReleaseFence,
        const std::vector<size_t>&) {
    return consumer->queueBuffer(consumer.get(), buffer, anwReleaseFence);
}

status_t Camera3OutputStream::returnBufferLocked(
        const camera_stream_buffer &buffer,
        nsecs_t timestamp, const std::vector<size_t>& surface_ids) {
    ATRACE_HFR_CALL();

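    // If this is the last buffer the HAL currently holds, return any prefetched
    // (batch-dequeued but not yet handed out) buffers back to the surface so
    // they are not held while the stream is otherwise idle.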
    if (mHandoutTotalBufferCount == 1) {
        returnPrefetchedBuffersLocked();
    }

    status_t res = returnAnyBufferLocked(buffer, timestamp, /*output*/true, surface_ids);

    if (res != OK) {
        return res;
    }

    mLastTimestamp = timestamp;
    mFrameCount++;

    return OK;
}

status_t Camera3OutputStream::returnBufferCheckedLocked(
        const camera_stream_buffer &buffer,
        nsecs_t timestamp,
        bool output,
        const std::vector<size_t>& surface_ids,
        /*out*/
        sp<Fence> *releaseFenceOut) {

    (void)output;
    ALOG_ASSERT(output, "Expected output to be true");

    status_t res;

    // Fence management - always honor release fence from HAL
    sp<Fence> releaseFence = new Fence(buffer.release_fence);
    int anwReleaseFence = releaseFence->dup();

    /**
     * Release the lock briefly to avoid deadlock with
     * StreamingProcessor::startStream -> Camera3Stream::isConfiguring (this
     * thread will go into StreamingProcessor::onFrameAvailable) during
     * queueBuffer
     */
    sp<ANativeWindow> currentConsumer = mConsumer;
    StreamState state = mState;
    mLock.unlock();

    ANativeWindowBuffer *anwBuffer = container_of(buffer.buffer, ANativeWindowBuffer, handle);
    /**
     * Return buffer back to ANativeWindow
     */
    if (buffer.status == CAMERA_BUFFER_STATUS_ERROR || mDropBuffers || timestamp == 0) {
        // Cancel buffer
        if (mDropBuffers) {
            ALOGV("%s: Dropping a frame for stream %d.", __FUNCTION__, mId);
        } else if (buffer.status == CAMERA_BUFFER_STATUS_ERROR) {
            ALOGV("%s: A frame is dropped for stream %d due to buffer error.", __FUNCTION__, mId);
        } else {
            ALOGE("%s: Stream %d: timestamp shouldn't be 0", __FUNCTION__, mId);
        }

        res = currentConsumer->cancelBuffer(currentConsumer.get(),
                anwBuffer,
                anwReleaseFence);
        if (shouldLogError(res, state)) {
            ALOGE("%s: Stream %d: Error cancelling buffer to native window:"
                  " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
        }

        notifyBufferReleased(anwBuffer);
        if (mUseBufferManager) {
            // Return this buffer back to buffer manager.
            mBufferProducerListener->onBufferReleased();
        }
    } else {
        if (mTraceFirstBuffer && (stream_type == CAMERA_STREAM_OUTPUT)) {
            {
                char traceLog[48];
                snprintf(traceLog, sizeof(traceLog), "Stream %d: first full buffer\n", mId);
                ATRACE_NAME(traceLog);
            }
            mTraceFirstBuffer = false;
        }

        /* Certain consumers (such as AudioSource or HardwareComposer) use
         * MONOTONIC time, causing time misalignment if the camera timestamp is
         * in BOOTTIME. Do the conversion if necessary. */
        res = native_window_set_buffers_timestamp(mConsumer.get(),
                mUseMonoTimestamp ? timestamp - mTimestampOffset : timestamp);
        if (res != OK) {
            ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                    __FUNCTION__, mId, strerror(-res), res);
            return res;
        }
        // If this is a JPEG output, and the image dump mask is set, save the
        // image to disk.
        if (getFormat() == HAL_PIXEL_FORMAT_BLOB && getDataSpace() == HAL_DATASPACE_V0_JFIF &&
                mImageDumpMask) {
            dumpImageToDisk(timestamp, anwBuffer, anwReleaseFence);
        }

        res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
        if (shouldLogError(res, state)) {
            ALOGE("%s: Stream %d: Error queueing buffer to native window:"
                  " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
        }
    }
    mLock.lock();

    // Once a valid buffer has been returned to the queue, the stream can no
    // longer dequeue all buffers for preallocation.
    if (buffer.status != CAMERA_BUFFER_STATUS_ERROR) {
        mStreamUnpreparable = true;
    }

    if (res != OK) {
        close(anwReleaseFence);
    }

    *releaseFenceOut = releaseFence;

    return res;
}

void Camera3OutputStream::dump(int fd, const Vector<String16> &args) const {
    (void) args;
    String8 lines;
    lines.appendFormat(" Stream[%d]: Output\n", mId);
    lines.appendFormat(" Consumer name: %s\n", mConsumerName.string());
    write(fd, lines.string(), lines.size());

    Camera3IOStreamBase::dump(fd, args);

    mDequeueBufferLatency.dump(fd,
            " DequeueBuffer latency histogram:");
}

status_t Camera3OutputStream::setTransform(int transform) {
    ATRACE_CALL();
    Mutex::Autolock l(mLock);
    return setTransformLocked(transform);
}

status_t Camera3OutputStream::setTransformLocked(int transform) {
    status_t res = OK;
    if (mState == STATE_ERROR) {
        ALOGE("%s: Stream in error state", __FUNCTION__);
        return INVALID_OPERATION;
    }

    mTransform = transform;
    if (mState == STATE_CONFIGURED) {
        res = native_window_set_buffers_transform(mConsumer.get(),
                transform);
        if (res != OK) {
            ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
                    __FUNCTION__, transform, strerror(-res), res);
        }
    }
    return res;
}

status_t Camera3OutputStream::configureQueueLocked() {
    status_t res;

    mTraceFirstBuffer = true;
    if ((res = Camera3IOStreamBase::configureQueueLocked()) != OK) {
        return res;
    }

    if ((res = configureConsumerQueueLocked()) != OK) {
        return res;
    }

    // Set a dequeueBuffer/attachBuffer timeout if the consumer is not hw composer or hw texture.
    // We need to skip these cases, as a timeout would disable the non-blocking (async) mode.
    if (!(isConsumedByHWComposer() || isConsumedByHWTexture())) {
        if (mUseBufferManager) {
            // When the buffer manager is handling the buffers, we should have available buffers
            // in the buffer queue before we call into dequeueBuffer, because the buffer manager
            // is tracking free buffers.
            // There are, however, some consumer-side features (ImageReader::discardFreeBuffers)
            // that can discard free buffers without notifying the buffer manager. We want the
            // timeout to happen immediately here so the buffer manager can update its internal
            // state and try to allocate a buffer instead of waiting.
            mConsumer->setDequeueTimeout(0);
        } else {
            mConsumer->setDequeueTimeout(kDequeueBufferTimeout);
        }
    }

    return OK;
}

status_t Camera3OutputStream::configureConsumerQueueLocked() {
    status_t res;

    mTraceFirstBuffer = true;

    ALOG_ASSERT(mConsumer != 0, "mConsumer should never be NULL");

    // Configure consumer-side ANativeWindow interface. The listener may be used
    // to notify buffer manager (if it is used) of the returned buffers.
    res = mConsumer->connect(NATIVE_WINDOW_API_CAMERA,
            /*reportBufferRemoval*/true,
            /*listener*/mBufferProducerListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mId);
        return res;
    }

    mConsumerName = mConsumer->getConsumerName();

    res = native_window_set_usage(mConsumer.get(), mUsage);
    if (res != OK) {
        ALOGE("%s: Unable to configure usage %" PRIu64 " for stream %d",
                __FUNCTION__, mUsage, mId);
        return res;
    }

    res = native_window_set_scaling_mode(mConsumer.get(),
            NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream scaling: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    if (mMaxSize == 0) {
        // For buffers of known size
        res = native_window_set_buffers_dimensions(mConsumer.get(),
                camera_stream::width, camera_stream::height);
    } else {
        // For buffers with bounded size
        res = native_window_set_buffers_dimensions(mConsumer.get(),
                mMaxSize, 1);
    }
    if (res != OK) {
        ALOGE("%s: Unable to configure stream buffer dimensions"
                " %d x %d (maxSize %zu) for stream %d",
                __FUNCTION__, camera_stream::width, camera_stream::height,
                mMaxSize, mId);
        return res;
    }
    res = native_window_set_buffers_format(mConsumer.get(),
            camera_stream::format);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream buffer format %#x for stream %d",
                __FUNCTION__, camera_stream::format, mId);
        return res;
    }

    res = native_window_set_buffers_data_space(mConsumer.get(),
            camera_stream::data_space);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream dataspace %#x for stream %d",
                __FUNCTION__, camera_stream::data_space, mId);
        return res;
    }

    int maxConsumerBuffers;
    res = static_cast<ANativeWindow*>(mConsumer.get())->query(
            mConsumer.get(),
            NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
    if (res != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mId);
        return res;
    }

    ALOGV("%s: Consumer wants %d buffers, HAL wants %d", __FUNCTION__,
            maxConsumerBuffers, camera_stream::max_buffers);
    if (camera_stream::max_buffers == 0) {
        ALOGE("%s: Camera HAL requested max_buffer count: %d, requires at least 1",
                __FUNCTION__, camera_stream::max_buffers);
        return INVALID_OPERATION;
    }

    mTotalBufferCount = maxConsumerBuffers + camera_stream::max_buffers;
    mHandoutTotalBufferCount = 0;
    mFrameCount = 0;
    mLastTimestamp = 0;
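    // Consumers that run off the MONOTONIC clock (HWC, video encoders) need
    // monotonic buffer timestamps; returnBufferCheckedLocked() subtracts
    // mTimestampOffset when this flag is set.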
    mUseMonoTimestamp = (isConsumedByHWComposer() | isVideoStream());

    res = native_window_set_buffer_count(mConsumer.get(),
            mTotalBufferCount);
    if (res != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d",
                __FUNCTION__, mId);
        return res;
    }

    res = native_window_set_buffers_transform(mConsumer.get(),
            mTransform);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
                __FUNCTION__, mTransform, strerror(-res), res);
        return res;
    }

    /**
     * The Camera3 buffer manager is only supported from HAL3.3 onwards, as older HALs require
     * buffers to be statically allocated for internal static buffer registration, while the
     * buffers provided by the buffer manager are dynamically allocated. Camera3Device only
     * sets mBufferManager if the device version is > HAL3.2, which guarantees that the buffer
     * manager setup is skipped in the code below. Note that HAL3.2 is also excluded here, as
     * some HAL3.2 devices may not support dynamic buffer registration.
     * Also, Camera3BufferManager does not support display/texture streams as they have their
     * own buffer management logic.
     */
    if (mBufferManager != 0 && mSetId > CAMERA3_STREAM_SET_ID_INVALID &&
            !(isConsumedByHWComposer() || isConsumedByHWTexture())) {
        uint64_t consumerUsage = 0;
        getEndpointUsage(&consumerUsage);
        uint32_t width = (mMaxSize == 0) ? getWidth() : mMaxSize;
        uint32_t height = (mMaxSize == 0) ? getHeight() : 1;
        StreamInfo streamInfo(
                getId(), getStreamSetId(), width, height, getFormat(), getDataSpace(),
                mUsage | consumerUsage, mTotalBufferCount,
                /*isConfigured*/true, isMultiResolution());
        wp<Camera3OutputStream> weakThis(this);
        res = mBufferManager->registerStream(weakThis,
                streamInfo);
        if (res == OK) {
            // Disable buffer allocation for this BufferQueue, buffer manager will take over
            // the buffer allocation responsibility.
            mConsumer->getIGraphicBufferProducer()->allowAllocation(false);
            mUseBufferManager = true;
        } else {
            ALOGE("%s: Unable to register stream %d to camera3 buffer manager, "
                  "(error %d %s), fall back to BufferQueue for buffer management!",
                  __FUNCTION__, mId, res, strerror(-res));
        }
    }

    return OK;
}

status_t Camera3OutputStream::getBufferLockedCommon(ANativeWindowBuffer** anb, int* fenceFd) {
    ATRACE_HFR_CALL();
    status_t res;

    if ((res = getBufferPreconditionCheckLocked()) != OK) {
        return res;
    }

    bool gotBufferFromManager = false;

    if (mUseBufferManager) {
        sp<GraphicBuffer> gb;
        res = mBufferManager->getBufferForStream(getId(), getStreamSetId(),
                isMultiResolution(), &gb, fenceFd);
        if (res == OK) {
            // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after a
            // successful return.
            *anb = gb.get();
            res = mConsumer->attachBuffer(*anb);
            if (shouldLogError(res, mState)) {
                ALOGE("%s: Stream %d: Can't attach the output buffer to this surface: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
            }
            if (res != OK) {
                checkRetAndSetAbandonedLocked(res);
                return res;
            }
            gotBufferFromManager = true;
            ALOGV("Stream %d: Attached new buffer", getId());
        } else if (res == ALREADY_EXISTS) {
            // Have sufficient free buffers already attached, can just
            // dequeue from buffer queue
            ALOGV("Stream %d: Reusing attached buffer", getId());
            gotBufferFromManager = false;
        } else if (res != OK) {
            ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager: %s (%d)",
                    __FUNCTION__, mId, strerror(-res), res);
            return res;
        }
    }
    if (!gotBufferFromManager) {
        /**
         * Release the lock briefly to avoid deadlock in the below scenario:
         * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
         *     This thread holds the StreamingProcessor lock and tries to lock the Camera3Stream
         *     lock.
         * Thread 2: Camera3Stream::returnBuffer -> StreamingProcessor::onFrameAvailable().
         *     This thread holds the Camera3Stream lock and the bufferQueue lock, and tries to
         *     lock the StreamingProcessor lock.
         * Thread 3: Camera3Stream::getBuffer(). This thread holds the Camera3Stream lock
         *     and tries to lock the bufferQueue lock.
         * That creates a circular locking dependency.
         */
        sp<Surface> consumer = mConsumer;
        size_t remainingBuffers = (mState == STATE_PREPARING ? mTotalBufferCount :
                                   camera_stream::max_buffers) - mHandoutTotalBufferCount;
        mLock.unlock();

        nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);

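        // A batch size greater than 1 means this stream prefetches buffers in
        // batches (configured via setBatchSize(), which is restricted to video
        // streams); serve dequeue requests from the cached batch and refill it
        // when it runs dry.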
        size_t batchSize = mBatchSize.load();
        if (batchSize == 1) {
            sp<ANativeWindow> anw = consumer;
            res = anw->dequeueBuffer(anw.get(), anb, fenceFd);
        } else {
            std::unique_lock<std::mutex> batchLock(mBatchLock);
            res = OK;
            if (mBatchedBuffers.size() == 0) {
                if (remainingBuffers == 0) {
                    ALOGE("%s: cannot get buffer while all buffers are handed out", __FUNCTION__);
                    return INVALID_OPERATION;
                }
                if (batchSize > remainingBuffers) {
                    batchSize = remainingBuffers;
                }
                batchLock.unlock();
                // Refill batched buffers
                std::vector<Surface::BatchBuffer> batchedBuffers;
                batchedBuffers.resize(batchSize);
                res = consumer->dequeueBuffers(&batchedBuffers);
                batchLock.lock();
                if (res != OK) {
                    ALOGE("%s: batch dequeueBuffers call failed! %s (%d)",
                            __FUNCTION__, strerror(-res), res);
                } else {
                    mBatchedBuffers = std::move(batchedBuffers);
                }
            }

            if (res == OK) {
                // Dispatch batch buffers
                *anb = mBatchedBuffers.back().buffer;
                *fenceFd = mBatchedBuffers.back().fenceFd;
                mBatchedBuffers.pop_back();
            }
        }

        nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
        mDequeueBufferLatency.add(dequeueStart, dequeueEnd);

        mLock.lock();

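        // A timeout with the buffer manager in charge usually means free buffers
        // were silently discarded on the consumer side (the dequeue timeout is 0
        // in that mode, see configureQueueLocked()); ask the buffer manager for a
        // replacement buffer and attach it to the queue instead of failing.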
        if (mUseBufferManager && res == TIMED_OUT) {
            checkRemovedBuffersLocked();

            sp<GraphicBuffer> gb;
            res = mBufferManager->getBufferForStream(
                    getId(), getStreamSetId(), isMultiResolution(),
                    &gb, fenceFd, /*noFreeBuffer*/true);

            if (res == OK) {
                // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after
                // a successful return.
                *anb = gb.get();
                res = mConsumer->attachBuffer(*anb);
                gotBufferFromManager = true;
                ALOGV("Stream %d: Attached new buffer", getId());

                if (res != OK) {
                    if (shouldLogError(res, mState)) {
                        ALOGE("%s: Stream %d: Can't attach the output buffer to this surface:"
                                " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
                    }
                    checkRetAndSetAbandonedLocked(res);
                    return res;
                }
            } else {
                ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager:"
                        " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
                return res;
            }
        } else if (res != OK) {
            if (shouldLogError(res, mState)) {
                ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
            }
            checkRetAndSetAbandonedLocked(res);
            return res;
        }
    }

    if (res == OK) {
        checkRemovedBuffersLocked();
    }

    return res;
}

void Camera3OutputStream::checkRemovedBuffersLocked(bool notifyBufferManager) {
    std::vector<sp<GraphicBuffer>> removedBuffers;
    status_t res = mConsumer->getAndFlushRemovedBuffers(&removedBuffers);
    if (res == OK) {
        onBuffersRemovedLocked(removedBuffers);

        if (notifyBufferManager && mUseBufferManager && removedBuffers.size() > 0) {
            mBufferManager->onBuffersRemoved(getId(), getStreamSetId(), isMultiResolution(),
                    removedBuffers.size());
        }
    }
}

void Camera3OutputStream::checkRetAndSetAbandonedLocked(status_t res) {
    // Only transition to STATE_ABANDONED from STATE_CONFIGURED. (If it is
    // STATE_PREPARING, let prepareNextBuffer handle the error.)
    if ((res == NO_INIT || res == DEAD_OBJECT) && mState == STATE_CONFIGURED) {
        mState = STATE_ABANDONED;
    }
}

bool Camera3OutputStream::shouldLogError(status_t res, StreamState state) {
    if (res == OK) {
        return false;
    }
    if ((res == DEAD_OBJECT || res == NO_INIT) && state == STATE_ABANDONED) {
        return false;
    }
    return true;
}

status_t Camera3OutputStream::disconnectLocked() {
    status_t res;

    if ((res = Camera3IOStreamBase::disconnectLocked()) != OK) {
        return res;
    }

    // If stream configuration was never finished (the stream can only be in STATE_IN_CONFIG or
    // STATE_CONSTRUCTED), there is no need to change the stream state; return OK.
    if (mConsumer == nullptr) {
        return OK;
    }

    returnPrefetchedBuffersLocked();

    ALOGV("%s: disconnecting stream %d from native window", __FUNCTION__, getId());

    res = native_window_api_disconnect(mConsumer.get(),
            NATIVE_WINDOW_API_CAMERA);
    /**
     * This is not an error. If the client's calling process dies, the window will
     * also die and all calls to it will return DEAD_OBJECT, thus it's already
     * "disconnected"
     */
    if (res == DEAD_OBJECT) {
        ALOGW("%s: While disconnecting stream %d from native window, the"
              " native window died from under us", __FUNCTION__, mId);
    }
    else if (res != OK) {
        ALOGE("%s: Unable to disconnect stream %d from native window "
              "(error %d %s)",
              __FUNCTION__, mId, res, strerror(-res));
        mState = STATE_ERROR;
        return res;
    }

    // Since the device is already idle, there are no getBuffer calls to the buffer manager, so
    // unregistering the stream at this point should be safe.
    if (mUseBufferManager) {
        res = mBufferManager->unregisterStream(getId(), getStreamSetId(), isMultiResolution());
        if (res != OK) {
            ALOGE("%s: Unable to unregister stream %d from buffer manager "
                  "(error %d %s)", __FUNCTION__, mId, res, strerror(-res));
            mState = STATE_ERROR;
            return res;
        }
        // Note that, to make the prepare/teardown case work, we must not call
        // mBufferManager.clear(), as the stream is still in a usable state after this call.
        mUseBufferManager = false;
    }

    mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG
                                           : STATE_CONSTRUCTED;

    mDequeueBufferLatency.log("Stream %d dequeueBuffer latency histogram", mId);
    mDequeueBufferLatency.reset();
    return OK;
}

status_t Camera3OutputStream::getEndpointUsage(uint64_t *usage) const {

    status_t res;

    if (mConsumer == nullptr) {
        // mConsumerUsage was sanitized before the Camera3OutputStream was constructed.
        *usage = mConsumerUsage;
        return OK;
    }

    res = getEndpointUsageForSurface(usage, mConsumer);

    return res;
}

void Camera3OutputStream::applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/) {
    if (consumerUsage == nullptr) {
        return;
    }

    // If an opaque output stream's endpoint is ImageReader, add
    // GRALLOC_USAGE_HW_CAMERA_ZSL to the usage so HAL knows it will be used
    // for the ZSL use case.
    // Assume it's for ImageReader if the consumer usage doesn't have any of these bits set:
    //     1. GRALLOC_USAGE_HW_TEXTURE
    //     2. GRALLOC_USAGE_HW_RENDER
    //     3. GRALLOC_USAGE_HW_COMPOSER
    //     4. GRALLOC_USAGE_HW_VIDEO_ENCODER
    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
            (*consumerUsage & (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER |
            GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_VIDEO_ENCODER)) == 0) {
        *consumerUsage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
    }
}

status_t Camera3OutputStream::getEndpointUsageForSurface(uint64_t *usage,
        const sp<Surface>& surface) const {
    status_t res;
    uint64_t u = 0;

    res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(surface.get()), &u);
    applyZSLUsageQuirk(camera_stream::format, &u);
    *usage = u;
    return res;
}

bool Camera3OutputStream::isVideoStream() const {
    uint64_t usage = 0;
    status_t res = getEndpointUsage(&usage);
    if (res != OK) {
        ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
        return false;
    }

    return (usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) != 0;
}

status_t Camera3OutputStream::setBufferManager(sp<Camera3BufferManager> bufferManager) {
    Mutex::Autolock l(mLock);
    if (mState != STATE_CONSTRUCTED) {
        ALOGE("%s: this method can only be called when stream in CONSTRUCTED state.",
                __FUNCTION__);
        return INVALID_OPERATION;
    }
    mBufferManager = bufferManager;

    return OK;
}

status_t Camera3OutputStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
        const std::vector<OutputStreamInfo> &/*outputInfo*/,
        const std::vector<size_t> &/*removedSurfaceIds*/,
        KeyedVector<sp<Surface>, size_t> * /*outputMap*/) {
    ALOGE("%s: this method is not supported!", __FUNCTION__);
    return INVALID_OPERATION;
}

void Camera3OutputStream::BufferProducerListener::onBufferReleased() {
    sp<Camera3OutputStream> stream = mParent.promote();
    if (stream == nullptr) {
        ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
        return;
    }

    Mutex::Autolock l(stream->mLock);
    if (!(stream->mUseBufferManager)) {
        return;
    }

    ALOGV("Stream %d: Buffer released", stream->getId());
    bool shouldFreeBuffer = false;
    status_t res = stream->mBufferManager->onBufferReleased(
            stream->getId(), stream->getStreamSetId(), stream->isMultiResolution(),
            &shouldFreeBuffer);
    if (res != OK) {
        ALOGE("%s: signaling buffer release to buffer manager failed: %s (%d).", __FUNCTION__,
                strerror(-res), res);
        stream->mState = STATE_ERROR;
    }

    if (shouldFreeBuffer) {
        sp<GraphicBuffer> buffer;
        // Detach and free a buffer (when buffer goes out of scope)
        stream->detachBufferLocked(&buffer, /*fenceFd*/ nullptr);
        if (buffer.get() != nullptr) {
            stream->mBufferManager->notifyBufferRemoved(
                    stream->getId(), stream->getStreamSetId(), stream->isMultiResolution());
        }
    }
}

void Camera3OutputStream::BufferProducerListener::onBuffersDiscarded(
        const std::vector<sp<GraphicBuffer>>& buffers) {
    sp<Camera3OutputStream> stream = mParent.promote();
    if (stream == nullptr) {
        ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
        return;
    }

    if (buffers.size() > 0) {
        Mutex::Autolock l(stream->mLock);
        stream->onBuffersRemovedLocked(buffers);
        if (stream->mUseBufferManager) {
            stream->mBufferManager->onBuffersRemoved(stream->getId(),
                    stream->getStreamSetId(), stream->isMultiResolution(), buffers.size());
        }
        ALOGV("Stream %d: %zu Buffers discarded.", stream->getId(), buffers.size());
    }
}

void Camera3OutputStream::onBuffersRemovedLocked(
        const std::vector<sp<GraphicBuffer>>& removedBuffers) {
    sp<Camera3StreamBufferFreedListener> callback = mBufferFreedListener.promote();
    if (callback != nullptr) {
        for (const auto& gb : removedBuffers) {
            callback->onBufferFreed(mId, gb->handle);
        }
    }
}

status_t Camera3OutputStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
    Mutex::Autolock l(mLock);
    return detachBufferLocked(buffer, fenceFd);
}

status_t Camera3OutputStream::detachBufferLocked(sp<GraphicBuffer>* buffer, int* fenceFd) {
    ALOGV("Stream %d: detachBuffer", getId());
    if (buffer == nullptr) {
        return BAD_VALUE;
    }

    sp<Fence> fence;
    status_t res = mConsumer->detachNextBuffer(buffer, &fence);
    if (res == NO_MEMORY) {
        // This may rarely happen, and indicates that the released buffer was freed by another
        // call (e.g., attachBuffer, dequeueBuffer, etc.) before reaching here. We should notify
        // the buffer manager that this buffer has been freed. It's not fatal, but should be
        // avoided, therefore log a warning.
        *buffer = 0;
        ALOGW("%s: the released buffer has already been freed by the buffer queue!", __FUNCTION__);
    } else if (res != OK) {
        // Treat other errors as abandonment
        if (shouldLogError(res, mState)) {
            ALOGE("%s: detach next buffer failed: %s (%d).", __FUNCTION__, strerror(-res), res);
        }
        mState = STATE_ABANDONED;
        return res;
    }

    if (fenceFd != nullptr) {
        if (fence != 0 && fence->isValid()) {
            *fenceFd = fence->dup();
        } else {
            *fenceFd = -1;
        }
    }

    // Here we assume detachBuffer is called by buffer manager so it doesn't need to be notified
    checkRemovedBuffersLocked(/*notifyBufferManager*/false);
    return res;
}

status_t Camera3OutputStream::dropBuffers(bool dropping) {
    Mutex::Autolock l(mLock);
    mDropBuffers = dropping;
    return OK;
}

const String8& Camera3OutputStream::getPhysicalCameraId() const {
    Mutex::Autolock l(mLock);
    return physicalCameraId();
}

status_t Camera3OutputStream::notifyBufferReleased(ANativeWindowBuffer* /*anwBuffer*/) {
    return OK;
}

bool Camera3OutputStream::isConsumerConfigurationDeferred(size_t surface_id) const {
    Mutex::Autolock l(mLock);

    if (surface_id != 0) {
        ALOGE("%s: surface_id %zu for Camera3OutputStream should be 0!", __FUNCTION__, surface_id);
    }
    return mConsumer == nullptr;
}

status_t Camera3OutputStream::setConsumers(const std::vector<sp<Surface>>& consumers) {
    Mutex::Autolock l(mLock);
    if (consumers.size() != 1) {
        ALOGE("%s: it's illegal to set %zu consumer surfaces!",
                __FUNCTION__, consumers.size());
        return INVALID_OPERATION;
    }
    if (consumers[0] == nullptr) {
        ALOGE("%s: it's illegal to set null consumer surface!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    if (mConsumer != nullptr) {
        ALOGE("%s: consumer surface was already set!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    mConsumer = consumers[0];
    return OK;
}

bool Camera3OutputStream::isConsumedByHWComposer() const {
    uint64_t usage = 0;
    status_t res = getEndpointUsage(&usage);
    if (res != OK) {
        ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
        return false;
    }

    return (usage & GRALLOC_USAGE_HW_COMPOSER) != 0;
}

bool Camera3OutputStream::isConsumedByHWTexture() const {
    uint64_t usage = 0;
    status_t res = getEndpointUsage(&usage);
    if (res != OK) {
        ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
        return false;
    }

    return (usage & GRALLOC_USAGE_HW_TEXTURE) != 0;
}

void Camera3OutputStream::dumpImageToDisk(nsecs_t timestamp,
        ANativeWindowBuffer* anwBuffer, int fence) {
    // Derive the output file name
    std::string fileExtension = "jpg";
    char imageFileName[64];
    time_t now = time(0);
    tm *localTime = localtime(&now);
    snprintf(imageFileName, sizeof(imageFileName), "IMG_%4d%02d%02d_%02d%02d%02d_%" PRId64 ".%s",
            1900 + localTime->tm_year, localTime->tm_mon + 1, localTime->tm_mday,
            localTime->tm_hour, localTime->tm_min, localTime->tm_sec,
            timestamp, fileExtension.c_str());

    // Lock the image for CPU read
    sp<GraphicBuffer> graphicBuffer = GraphicBuffer::from(anwBuffer);
    void* mapped = nullptr;
    base::unique_fd fenceFd(dup(fence));
    status_t res = graphicBuffer->lockAsync(GraphicBuffer::USAGE_SW_READ_OFTEN, &mapped,
            fenceFd.get());
    if (res != OK) {
        ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
        return;
    }

    // Figure out actual file size
    auto actualJpegSize = android::camera2::JpegProcessor::findJpegSize((uint8_t*)mapped, mMaxSize);
    if (actualJpegSize == 0) {
        actualJpegSize = mMaxSize;
    }

    // Output image data to file
    std::string filePath = "/data/misc/cameraserver/";
    filePath += imageFileName;
    std::ofstream imageFile(filePath.c_str(), std::ofstream::binary);
    if (!imageFile.is_open()) {
        ALOGE("%s: Unable to create file %s", __FUNCTION__, filePath.c_str());
        graphicBuffer->unlock();
        return;
    }
    imageFile.write((const char*)mapped, actualJpegSize);

    graphicBuffer->unlock();
}

status_t Camera3OutputStream::setBatchSize(size_t batchSize) {
    Mutex::Autolock l(mLock);
    if (batchSize == 0) {
        ALOGE("%s: invalid batch size 0", __FUNCTION__);
        return BAD_VALUE;
    }

    if (mUseBufferManager) {
        ALOGE("%s: batch operation is not supported with buffer manager", __FUNCTION__);
        return INVALID_OPERATION;
    }

    if (!isVideoStream()) {
        ALOGE("%s: batch operation is not supported with non-video stream", __FUNCTION__);
        return INVALID_OPERATION;
    }

    if (camera_stream::max_buffers < batchSize) {
        ALOGW("%s: batch size is capped by max_buffers %d", __FUNCTION__,
                camera_stream::max_buffers);
        batchSize = camera_stream::max_buffers;
    }

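    // Atomically switch from the default batch size (1) to the requested size.
    // If the stored value is no longer 1, batching has already been configured
    // and changing it on the fly is not supported.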
    size_t defaultBatchSize = 1;
    if (!mBatchSize.compare_exchange_strong(defaultBatchSize, batchSize)) {
        ALOGE("%s: change batch size from %zu to %zu dynamically is not supported",
                __FUNCTION__, defaultBatchSize, batchSize);
        return INVALID_OPERATION;
    }

    return OK;
}

void Camera3OutputStream::returnPrefetchedBuffersLocked() {
    std::vector<Surface::BatchBuffer> batchedBuffers;

    {
        std::lock_guard<std::mutex> batchLock(mBatchLock);
        if (mBatchedBuffers.size() != 0) {
            ALOGW("%s: %zu extra prefetched buffers detected. Returning",
                    __FUNCTION__, mBatchedBuffers.size());
            batchedBuffers = std::move(mBatchedBuffers);
        }
    }

    if (batchedBuffers.size() > 0) {
        mConsumer->cancelBuffers(batchedBuffers);
    }
}

}; // namespace camera3

}; // namespace android