1 /*
2  * Copyright (C) 2013-2018 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #define LOG_TAG "Camera3-OutputStream"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20 
21 #include <utils/Log.h>
22 #include <utils/Trace.h>
23 #include "Camera3OutputStream.h"
24 
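// container_of recovers a pointer to the enclosing struct from a pointer to one of its
// members; it is used below to map a buffer_handle_t* back to its owning ANativeWindowBuffer.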
25 #ifndef container_of
26 #define container_of(ptr, type, member) \
27     (type *)((char*)(ptr) - offsetof(type, member))
28 #endif
29 
30 namespace android {
31 
32 namespace camera3 {
33 
34 Camera3OutputStream::Camera3OutputStream(int id,
35         sp<Surface> consumer,
36         uint32_t width, uint32_t height, int format,
37         android_dataspace dataSpace, camera3_stream_rotation_t rotation,
38         nsecs_t timestampOffset, const String8& physicalCameraId,
39         int setId) :
40         Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height,
41                             /*maxSize*/0, format, dataSpace, rotation,
42                             physicalCameraId, setId),
43         mConsumer(consumer),
44         mTransform(0),
45         mTraceFirstBuffer(true),
46         mUseBufferManager(false),
47         mTimestampOffset(timestampOffset),
48         mConsumerUsage(0),
49         mDropBuffers(false),
50         mDequeueBufferLatency(kDequeueLatencyBinSize) {
51 
52     if (mConsumer == NULL) {
53         ALOGE("%s: Consumer is NULL!", __FUNCTION__);
54         mState = STATE_ERROR;
55     }
56 
57     if (setId > CAMERA3_STREAM_SET_ID_INVALID) {
58         mBufferReleasedListener = new BufferReleasedListener(this);
59     }
60 }
61 
62 Camera3OutputStream::Camera3OutputStream(int id,
63         sp<Surface> consumer,
64         uint32_t width, uint32_t height, size_t maxSize, int format,
65         android_dataspace dataSpace, camera3_stream_rotation_t rotation,
66         nsecs_t timestampOffset, const String8& physicalCameraId, int setId) :
67         Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height, maxSize,
68                             format, dataSpace, rotation, physicalCameraId, setId),
69         mConsumer(consumer),
70         mTransform(0),
71         mTraceFirstBuffer(true),
72         mUseMonoTimestamp(false),
73         mUseBufferManager(false),
74         mTimestampOffset(timestampOffset),
75         mConsumerUsage(0),
76         mDropBuffers(false),
77         mDequeueBufferLatency(kDequeueLatencyBinSize) {
78 
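    // A maxSize-based (size-only) stream only makes sense for variable-length formats,
    // i.e. BLOB (JPEG) and opaque RAW.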
79     if (format != HAL_PIXEL_FORMAT_BLOB && format != HAL_PIXEL_FORMAT_RAW_OPAQUE) {
80         ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__,
81                 format);
82         mState = STATE_ERROR;
83     }
84 
85     if (mConsumer == NULL) {
86         ALOGE("%s: Consumer is NULL!", __FUNCTION__);
87         mState = STATE_ERROR;
88     }
89 
90     if (setId > CAMERA3_STREAM_SET_ID_INVALID) {
91         mBufferReleasedListener = new BufferReleasedListener(this);
92     }
93 }
94 
95 Camera3OutputStream::Camera3OutputStream(int id,
96         uint32_t width, uint32_t height, int format,
97         uint64_t consumerUsage, android_dataspace dataSpace,
98         camera3_stream_rotation_t rotation, nsecs_t timestampOffset,
99         const String8& physicalCameraId, int setId) :
100         Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height,
101                             /*maxSize*/0, format, dataSpace, rotation,
102                             physicalCameraId, setId),
103         mConsumer(nullptr),
104         mTransform(0),
105         mTraceFirstBuffer(true),
106         mUseBufferManager(false),
107         mTimestampOffset(timestampOffset),
108         mConsumerUsage(consumerUsage),
109         mDropBuffers(false),
110         mDequeueBufferLatency(kDequeueLatencyBinSize) {
111     // A deferred consumer only supports the preview surface format for now.
112     if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
113         ALOGE("%s: Deferred consumer only supports IMPLEMENTATION_DEFINED format now!",
114                 __FUNCTION__);
115         mState = STATE_ERROR;
116     }
117 
118     // Sanity check for the consumer usage flag.
119     if ((consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) == 0 &&
120             (consumerUsage & GraphicBuffer::USAGE_HW_COMPOSER) == 0) {
121         ALOGE("%s: Deferred consumer usage flag is illegal %" PRIu64 "!",
122               __FUNCTION__, consumerUsage);
123         mState = STATE_ERROR;
124     }
125 
126     mConsumerName = String8("Deferred");
127     if (setId > CAMERA3_STREAM_SET_ID_INVALID) {
128         mBufferReleasedListener = new BufferReleasedListener(this);
129     }
130 
131 }
132 
133 Camera3OutputStream::Camera3OutputStream(int id, camera3_stream_type_t type,
134                                          uint32_t width, uint32_t height,
135                                          int format,
136                                          android_dataspace dataSpace,
137                                          camera3_stream_rotation_t rotation,
138                                          const String8& physicalCameraId,
139                                          uint64_t consumerUsage, nsecs_t timestampOffset,
140                                          int setId) :
141         Camera3IOStreamBase(id, type, width, height,
142                             /*maxSize*/0,
143                             format, dataSpace, rotation,
144                             physicalCameraId, setId),
145         mTransform(0),
146         mTraceFirstBuffer(true),
147         mUseMonoTimestamp(false),
148         mUseBufferManager(false),
149         mTimestampOffset(timestampOffset),
150         mConsumerUsage(consumerUsage),
151         mDropBuffers(false),
152         mDequeueBufferLatency(kDequeueLatencyBinSize) {
153 
154     if (setId > CAMERA3_STREAM_SET_ID_INVALID) {
155         mBufferReleasedListener = new BufferReleasedListener(this);
156     }
157 
158     // Subclasses are expected to initialize mConsumer themselves.
159 }
160 
161 
162 Camera3OutputStream::~Camera3OutputStream() {
163     disconnectLocked();
164 }
165 
166 status_t Camera3OutputStream::getBufferLocked(camera3_stream_buffer *buffer,
167         const std::vector<size_t>&) {
168     ATRACE_CALL();
169 
170     ANativeWindowBuffer* anb;
171     int fenceFd = -1;
172 
173     status_t res;
174     res = getBufferLockedCommon(&anb, &fenceFd);
175     if (res != OK) {
176         return res;
177     }
178 
179     /**
180      * The fence FD is now owned by the HAL, except in case of error,
181      * in which case we reassign it to the acquire fence.
182      */
183     handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd,
184                         /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK, /*output*/true);
185 
186     return OK;
187 }
188 
189 status_t Camera3OutputStream::queueBufferToConsumer(sp<ANativeWindow>& consumer,
190             ANativeWindowBuffer* buffer, int anwReleaseFence,
191             const std::vector<size_t>&) {
192     return consumer->queueBuffer(consumer.get(), buffer, anwReleaseFence);
193 }
194 
195 status_t Camera3OutputStream::returnBufferLocked(
196         const camera3_stream_buffer &buffer,
197         nsecs_t timestamp, const std::vector<size_t>& surface_ids) {
198     ATRACE_CALL();
199 
200     status_t res = returnAnyBufferLocked(buffer, timestamp, /*output*/true, surface_ids);
201 
202     if (res != OK) {
203         return res;
204     }
205 
206     mLastTimestamp = timestamp;
207     mFrameCount++;
208 
209     return OK;
210 }
211 
212 status_t Camera3OutputStream::returnBufferCheckedLocked(
213             const camera3_stream_buffer &buffer,
214             nsecs_t timestamp,
215             bool output,
216             const std::vector<size_t>& surface_ids,
217             /*out*/
218             sp<Fence> *releaseFenceOut) {
219 
220     (void)output;
221     ALOG_ASSERT(output, "Expected output to be true");
222 
223     status_t res;
224 
225     // Fence management - always honor release fence from HAL
226     sp<Fence> releaseFence = new Fence(buffer.release_fence);
227     int anwReleaseFence = releaseFence->dup();
228 
229     /**
230      * Release the lock briefly to avoid deadlock with
231      * StreamingProcessor::startStream -> Camera3Stream::isConfiguring (this
232      * thread will go into StreamingProcessor::onFrameAvailable) during
233      * queueBuffer
234      */
235     sp<ANativeWindow> currentConsumer = mConsumer;
236     StreamState state = mState;
237     mLock.unlock();
238 
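    // buffer.buffer is a buffer_handle_t*; recover the ANativeWindowBuffer that owns it so it
    // can be returned (queued or cancelled) to the ANativeWindow.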
239     ANativeWindowBuffer *anwBuffer = container_of(buffer.buffer, ANativeWindowBuffer, handle);
240     /**
241      * Return buffer back to ANativeWindow
242      */
243     if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR || mDropBuffers || timestamp == 0) {
244         // Cancel buffer
245         if (mDropBuffers) {
246             ALOGV("%s: Dropping a frame for stream %d.", __FUNCTION__, mId);
247         } else if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) {
248             ALOGV("%s: A frame is dropped for stream %d due to buffer error.", __FUNCTION__, mId);
249         } else {
250             ALOGE("%s: Stream %d: timestamp shouldn't be 0", __FUNCTION__, mId);
251         }
252 
253         res = currentConsumer->cancelBuffer(currentConsumer.get(),
254                 anwBuffer,
255                 anwReleaseFence);
256         if (shouldLogError(res, state)) {
257             ALOGE("%s: Stream %d: Error cancelling buffer to native window:"
258                   " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
259         }
260 
261         notifyBufferReleased(anwBuffer);
262         if (mUseBufferManager) {
263             // Return this buffer back to buffer manager.
264             mBufferReleasedListener->onBufferReleased();
265         }
266     } else {
267         if (mTraceFirstBuffer && (stream_type == CAMERA3_STREAM_OUTPUT)) {
268             {
269                 char traceLog[48];
270                 snprintf(traceLog, sizeof(traceLog), "Stream %d: first full buffer\n", mId);
271                 ATRACE_NAME(traceLog);
272             }
273             mTraceFirstBuffer = false;
274         }
275 
276         /* Certain consumers (such as AudioSource or HardwareComposer) use
277          * MONOTONIC time, causing time misalignment if the camera timestamp
278          * is in BOOTTIME. Do the conversion if necessary. */
279         res = native_window_set_buffers_timestamp(mConsumer.get(),
280                 mUseMonoTimestamp ? timestamp - mTimestampOffset : timestamp);
281         if (res != OK) {
282             ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
283                   __FUNCTION__, mId, strerror(-res), res);
284             return res;
285         }
286 
287         res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
288         if (shouldLogError(res, state)) {
289             ALOGE("%s: Stream %d: Error queueing buffer to native window:"
290                   " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
291         }
292     }
293     mLock.lock();
294 
295     // Once a valid buffer has been returned to the queue, we can no longer
296     // dequeue all buffers for preallocation.
297     if (buffer.status != CAMERA3_BUFFER_STATUS_ERROR) {
298         mStreamUnpreparable = true;
299     }
300 
301     if (res != OK) {
302         close(anwReleaseFence);
303     }
304 
305     *releaseFenceOut = releaseFence;
306 
307     return res;
308 }
309 
310 void Camera3OutputStream::dump(int fd, const Vector<String16> &args) const {
311     (void) args;
312     String8 lines;
313     lines.appendFormat("    Stream[%d]: Output\n", mId);
314     lines.appendFormat("      Consumer name: %s\n", mConsumerName.string());
315     write(fd, lines.string(), lines.size());
316 
317     Camera3IOStreamBase::dump(fd, args);
318 
319     mDequeueBufferLatency.dump(fd,
320         "      DequeueBuffer latency histogram:");
321 }
322 
323 status_t Camera3OutputStream::setTransform(int transform) {
324     ATRACE_CALL();
325     Mutex::Autolock l(mLock);
326     return setTransformLocked(transform);
327 }
328 
329 status_t Camera3OutputStream::setTransformLocked(int transform) {
330     status_t res = OK;
331     if (mState == STATE_ERROR) {
332         ALOGE("%s: Stream in error state", __FUNCTION__);
333         return INVALID_OPERATION;
334     }
335 
336     mTransform = transform;
337     if (mState == STATE_CONFIGURED) {
338         res = native_window_set_buffers_transform(mConsumer.get(),
339                 transform);
340         if (res != OK) {
341             ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
342                     __FUNCTION__, transform, strerror(-res), res);
343         }
344     }
345     return res;
346 }
347 
348 status_t Camera3OutputStream::configureQueueLocked() {
349     status_t res;
350 
351     mTraceFirstBuffer = true;
352     if ((res = Camera3IOStreamBase::configureQueueLocked()) != OK) {
353         return res;
354     }
355 
356     if ((res = configureConsumerQueueLocked()) != OK) {
357         return res;
358     }
359 
360     // Set the dequeueBuffer/attachBuffer timeout if the consumer is not a hw composer or hw texture.
361     // We need to skip those cases, as the timeout would disable the non-blocking (async) mode.
362     if (!(isConsumedByHWComposer() || isConsumedByHWTexture())) {
363         if (mUseBufferManager) {
364             // When the buffer manager is handling buffers, there should already be available
365             // buffers in the buffer queue before we call into dequeueBuffer, because the buffer
366             // manager is tracking free buffers.
367             // However, some consumer-side features (e.g. ImageReader::discardFreeBuffers) can
368             // discard free buffers without notifying the buffer manager. We want the timeout to
369             // happen immediately here so the buffer manager can update its internal state and
370             // try to allocate a buffer instead of waiting.
371             mConsumer->setDequeueTimeout(0);
372         } else {
373             mConsumer->setDequeueTimeout(kDequeueBufferTimeout);
374         }
375     }
376 
377     return OK;
378 }
379 
380 status_t Camera3OutputStream::configureConsumerQueueLocked() {
381     status_t res;
382 
383     mTraceFirstBuffer = true;
384 
385     ALOG_ASSERT(mConsumer != 0, "mConsumer should never be NULL");
386 
387     // Configure the consumer-side ANativeWindow interface. The listener may be used
388     // to notify the buffer manager (if one is used) of the returned buffers.
389     res = mConsumer->connect(NATIVE_WINDOW_API_CAMERA,
390             /*listener*/mBufferReleasedListener,
391             /*reportBufferRemoval*/true);
392     if (res != OK) {
393         ALOGE("%s: Unable to connect to native window for stream %d",
394                 __FUNCTION__, mId);
395         return res;
396     }
397 
398     mConsumerName = mConsumer->getConsumerName();
399 
400     res = native_window_set_usage(mConsumer.get(), mUsage);
401     if (res != OK) {
402         ALOGE("%s: Unable to configure usage %" PRIu64 " for stream %d",
403                 __FUNCTION__, mUsage, mId);
404         return res;
405     }
406 
407     res = native_window_set_scaling_mode(mConsumer.get(),
408             NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
409     if (res != OK) {
410         ALOGE("%s: Unable to configure stream scaling: %s (%d)",
411                 __FUNCTION__, strerror(-res), res);
412         return res;
413     }
414 
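    // Size-only (BLOB) streams are laid out as a mMaxSize x 1 buffer rather than width x height.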
415     if (mMaxSize == 0) {
416         // For buffers of known size
417         res = native_window_set_buffers_dimensions(mConsumer.get(),
418                 camera3_stream::width, camera3_stream::height);
419     } else {
420         // For buffers with bounded size
421         res = native_window_set_buffers_dimensions(mConsumer.get(),
422                 mMaxSize, 1);
423     }
424     if (res != OK) {
425         ALOGE("%s: Unable to configure stream buffer dimensions"
426                 " %d x %d (maxSize %zu) for stream %d",
427                 __FUNCTION__, camera3_stream::width, camera3_stream::height,
428                 mMaxSize, mId);
429         return res;
430     }
431     res = native_window_set_buffers_format(mConsumer.get(),
432             camera3_stream::format);
433     if (res != OK) {
434         ALOGE("%s: Unable to configure stream buffer format %#x for stream %d",
435                 __FUNCTION__, camera3_stream::format, mId);
436         return res;
437     }
438 
439     res = native_window_set_buffers_data_space(mConsumer.get(),
440             camera3_stream::data_space);
441     if (res != OK) {
442         ALOGE("%s: Unable to configure stream dataspace %#x for stream %d",
443                 __FUNCTION__, camera3_stream::data_space, mId);
444         return res;
445     }
446 
447     int maxConsumerBuffers;
448     res = static_cast<ANativeWindow*>(mConsumer.get())->query(
449             mConsumer.get(),
450             NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
451     if (res != OK) {
452         ALOGE("%s: Unable to query consumer undequeued"
453                 " buffer count for stream %d", __FUNCTION__, mId);
454         return res;
455     }
456 
457     ALOGV("%s: Consumer wants %d buffers, HAL wants %d", __FUNCTION__,
458             maxConsumerBuffers, camera3_stream::max_buffers);
459     if (camera3_stream::max_buffers == 0) {
460         ALOGE("%s: Camera HAL requested max_buffer count: %d, requires at least 1",
461                 __FUNCTION__, camera3_stream::max_buffers);
462         return INVALID_OPERATION;
463     }
464 
465     mTotalBufferCount = maxConsumerBuffers + camera3_stream::max_buffers;
466     mHandoutTotalBufferCount = 0;
467     mFrameCount = 0;
468     mLastTimestamp = 0;
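    // HWC and video encoder consumers expect MONOTONIC timestamps; flag the stream so that
    // returnBufferCheckedLocked() subtracts mTimestampOffset before queueing the buffer.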
469     mUseMonoTimestamp = (isConsumedByHWComposer() | isVideoStream());
470 
471     res = native_window_set_buffer_count(mConsumer.get(),
472             mTotalBufferCount);
473     if (res != OK) {
474         ALOGE("%s: Unable to set buffer count for stream %d",
475                 __FUNCTION__, mId);
476         return res;
477     }
478 
479     res = native_window_set_buffers_transform(mConsumer.get(),
480             mTransform);
481     if (res != OK) {
482         ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
483                 __FUNCTION__, mTransform, strerror(-res), res);
484         return res;
485     }
486 
487     /**
488      * The Camera3 buffer manager is only supported from HAL3.3 onwards, as older HALs require
489      * buffers to be statically allocated for internal static buffer registration, while the
490      * buffers provided by the buffer manager are dynamically allocated. Camera3Device only
491      * sets mBufferManager if the device version is > HAL3.2, which guarantees that the buffer
492      * manager setup below is skipped for older devices. Note that HAL3.2 is also excluded
493      * here, as some HAL3.2 devices may not support dynamic buffer registration.
494      * Camera3BufferManager also does not support display/texture streams, as they have their
495      * own buffer management logic.
496      */
497     if (mBufferManager != 0 && mSetId > CAMERA3_STREAM_SET_ID_INVALID &&
498             !(isConsumedByHWComposer() || isConsumedByHWTexture())) {
499         uint64_t consumerUsage = 0;
500         getEndpointUsage(&consumerUsage);
501         StreamInfo streamInfo(
502                 getId(), getStreamSetId(), getWidth(), getHeight(), getFormat(), getDataSpace(),
503                 mUsage | consumerUsage, mTotalBufferCount,
504                 /*isConfigured*/true);
505         wp<Camera3OutputStream> weakThis(this);
506         res = mBufferManager->registerStream(weakThis,
507                 streamInfo);
508         if (res == OK) {
509             // Disable buffer allocation for this BufferQueue, buffer manager will take over
510             // the buffer allocation responsibility.
511             mConsumer->getIGraphicBufferProducer()->allowAllocation(false);
512             mUseBufferManager = true;
513         } else {
514             ALOGE("%s: Unable to register stream %d to camera3 buffer manager, "
515                   "(error %d %s), fall back to BufferQueue for buffer management!",
516                   __FUNCTION__, mId, res, strerror(-res));
517         }
518     }
519 
520     return OK;
521 }
522 
523 status_t Camera3OutputStream::getBufferLockedCommon(ANativeWindowBuffer** anb, int* fenceFd) {
524     ATRACE_CALL();
525     status_t res;
526 
527     if ((res = getBufferPreconditionCheckLocked()) != OK) {
528         return res;
529     }
530 
531     bool gotBufferFromManager = false;
532 
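    // Two ways to obtain a buffer: when the buffer manager is in use, request a buffer from it
    // and attach it to the surface; otherwise (or if enough buffers are already attached), fall
    // back to a plain dequeueBuffer on the consumer below.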
533     if (mUseBufferManager) {
534         sp<GraphicBuffer> gb;
535         res = mBufferManager->getBufferForStream(getId(), getStreamSetId(), &gb, fenceFd);
536         if (res == OK) {
537             // Attach this buffer to the bufferQueue: the buffer will be in the dequeued state
538             // after a successful return.
539             *anb = gb.get();
540             res = mConsumer->attachBuffer(*anb);
541             if (shouldLogError(res, mState)) {
542                 ALOGE("%s: Stream %d: Can't attach the output buffer to this surface: %s (%d)",
543                         __FUNCTION__, mId, strerror(-res), res);
544             }
545             if (res != OK) {
546                 checkRetAndSetAbandonedLocked(res);
547                 return res;
548             }
549             gotBufferFromManager = true;
550             ALOGV("Stream %d: Attached new buffer", getId());
551         } else if (res == ALREADY_EXISTS) {
552             // There are sufficient free buffers already attached; we can just
553             // dequeue from the buffer queue.
554             ALOGV("Stream %d: Reusing attached buffer", getId());
555             gotBufferFromManager = false;
556         } else if (res != OK) {
557             ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager: %s (%d)",
558                     __FUNCTION__, mId, strerror(-res), res);
559             return res;
560         }
561     }
562     if (!gotBufferFromManager) {
563         /**
564          * Release the lock briefly to avoid deadlock in the following scenario:
565          * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
566          * This thread holds the StreamingProcessor lock and tries to take the Camera3Stream lock.
567          * Thread 2: Camera3Stream::returnBuffer -> StreamingProcessor::onFrameAvailable().
568          * This thread holds the Camera3Stream lock and the bufferQueue lock, and tries to take
569          * the StreamingProcessor lock.
570          * Thread 3: Camera3Stream::getBuffer(). This thread holds the Camera3Stream lock
571          * and tries to take the bufferQueue lock.
572          * This creates a circular locking dependency.
573          */
574         sp<ANativeWindow> currentConsumer = mConsumer;
575         mLock.unlock();
576 
577         nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);
578         res = currentConsumer->dequeueBuffer(currentConsumer.get(), anb, fenceFd);
579         nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
580         mDequeueBufferLatency.add(dequeueStart, dequeueEnd);
581 
582         mLock.lock();
583 
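        // A dequeue timeout while the buffer manager is in use typically means the consumer
        // discarded its free buffers behind the buffer manager's back (see configureQueueLocked());
        // retry through the buffer manager, telling it no free buffer is available in the queue.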
584         if (mUseBufferManager && res == TIMED_OUT) {
585             checkRemovedBuffersLocked();
586 
587             sp<GraphicBuffer> gb;
588             res = mBufferManager->getBufferForStream(
589                     getId(), getStreamSetId(), &gb, fenceFd, /*noFreeBuffer*/true);
590 
591             if (res == OK) {
592                 // Attach this buffer to the bufferQueue: the buffer will be in the dequeued
593                 // state after a successful return.
594                 *anb = gb.get();
595                 res = mConsumer->attachBuffer(*anb);
596                 gotBufferFromManager = true;
597                 ALOGV("Stream %d: Attached new buffer", getId());
598 
599                 if (res != OK) {
600                     if (shouldLogError(res, mState)) {
601                         ALOGE("%s: Stream %d: Can't attach the output buffer to this surface:"
602                                 " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
603                     }
604                     checkRetAndSetAbandonedLocked(res);
605                     return res;
606                 }
607             } else {
608                 ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager:"
609                         " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
610                 return res;
611             }
612         } else if (res != OK) {
613             if (shouldLogError(res, mState)) {
614                 ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)",
615                         __FUNCTION__, mId, strerror(-res), res);
616             }
617             checkRetAndSetAbandonedLocked(res);
618             return res;
619         }
620     }
621 
622     if (res == OK) {
623         checkRemovedBuffersLocked();
624     }
625 
626     return res;
627 }
628 
629 void Camera3OutputStream::checkRemovedBuffersLocked(bool notifyBufferManager) {
630     std::vector<sp<GraphicBuffer>> removedBuffers;
631     status_t res = mConsumer->getAndFlushRemovedBuffers(&removedBuffers);
632     if (res == OK) {
633         onBuffersRemovedLocked(removedBuffers);
634 
635         if (notifyBufferManager && mUseBufferManager && removedBuffers.size() > 0) {
636             mBufferManager->onBuffersRemoved(getId(), getStreamSetId(), removedBuffers.size());
637         }
638     }
639 }
640 
641 void Camera3OutputStream::checkRetAndSetAbandonedLocked(status_t res) {
642     // Only transition to STATE_ABANDONED from STATE_CONFIGURED. (If it is
643     // STATE_PREPARING, let prepareNextBuffer handle the error.)
644     if ((res == NO_INIT || res == DEAD_OBJECT) && mState == STATE_CONFIGURED) {
645         mState = STATE_ABANDONED;
646     }
647 }
648 
649 bool Camera3OutputStream::shouldLogError(status_t res, StreamState state) {
650     if (res == OK) {
651         return false;
652     }
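    // DEAD_OBJECT / NO_INIT on an already-abandoned stream are expected during consumer
    // teardown, so don't log them as errors.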
653     if ((res == DEAD_OBJECT || res == NO_INIT) && state == STATE_ABANDONED) {
654         return false;
655     }
656     return true;
657 }
658 
659 status_t Camera3OutputStream::disconnectLocked() {
660     status_t res;
661 
662     if ((res = Camera3IOStreamBase::disconnectLocked()) != OK) {
663         return res;
664     }
665 
666     // If stream configuration was not finished (the stream can only be in STATE_IN_CONFIG or
667     // STATE_CONSTRUCTED), there is no need to change the stream state; return OK.
668     if (mConsumer == nullptr) {
669         return OK;
670     }
671 
672     ALOGV("%s: disconnecting stream %d from native window", __FUNCTION__, getId());
673 
674     res = native_window_api_disconnect(mConsumer.get(),
675                                        NATIVE_WINDOW_API_CAMERA);
676     /**
677      * This is not an error. If the client's calling process dies, the window will
678      * also die and all calls to it will return DEAD_OBJECT; thus it's already
679      * "disconnected".
680      */
681     if (res == DEAD_OBJECT) {
682         ALOGW("%s: While disconnecting stream %d from native window, the"
683                 " native window died from under us", __FUNCTION__, mId);
684     }
685     else if (res != OK) {
686         ALOGE("%s: Unable to disconnect stream %d from native window "
687               "(error %d %s)",
688               __FUNCTION__, mId, res, strerror(-res));
689         mState = STATE_ERROR;
690         return res;
691     }
692 
693     // Since the device is already idle, there are no getBuffer calls to the buffer manager, so
694     // unregistering the stream at this point should be safe.
695     if (mUseBufferManager) {
696         res = mBufferManager->unregisterStream(getId(), getStreamSetId());
697         if (res != OK) {
698             ALOGE("%s: Unable to unregister stream %d from buffer manager "
699                     "(error %d %s)", __FUNCTION__, mId, res, strerror(-res));
700             mState = STATE_ERROR;
701             return res;
702         }
703         // Note that, to make the prepare/teardown case work, we must not call
704         // mBufferManager.clear(), as the stream is still in a usable state after this call.
705         mUseBufferManager = false;
706     }
707 
708     mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG
709                                            : STATE_CONSTRUCTED;
710 
711     mDequeueBufferLatency.log("Stream %d dequeueBuffer latency histogram", mId);
712     mDequeueBufferLatency.reset();
713     return OK;
714 }
715 
716 status_t Camera3OutputStream::getEndpointUsage(uint64_t *usage) const {
717 
718     status_t res;
719 
720     if (mConsumer == nullptr) {
721         // mConsumerUsage was sanitized before the Camera3OutputStream was constructed.
722         *usage = mConsumerUsage;
723         return OK;
724     }
725 
726     res = getEndpointUsageForSurface(usage, mConsumer);
727 
728     return res;
729 }
730 
731 void Camera3OutputStream::applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/) {
732     if (consumerUsage == nullptr) {
733         return;
734     }
735 
736     // If an opaque output stream's endpoint is an ImageReader, add
737     // GRALLOC_USAGE_HW_CAMERA_ZSL to the usage so the HAL knows it will be used
738     // for the ZSL use case.
739     // Assume it's for ImageReader if the consumer usage doesn't have any of these bits set:
740     //     1. GRALLOC_USAGE_HW_TEXTURE
741     //     2. GRALLOC_USAGE_HW_RENDER
742     //     3. GRALLOC_USAGE_HW_COMPOSER
743     //     4. GRALLOC_USAGE_HW_VIDEO_ENCODER
744     if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
745             (*consumerUsage & (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER |
746             GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_VIDEO_ENCODER)) == 0) {
747         *consumerUsage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
748     }
749 }
750 
751 status_t Camera3OutputStream::getEndpointUsageForSurface(uint64_t *usage,
752         const sp<Surface>& surface) const {
753     status_t res;
754     uint64_t u = 0;
755 
756     res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(surface.get()), &u);
757     applyZSLUsageQuirk(camera3_stream::format, &u);
758     *usage = u;
759     return res;
760 }
761 
762 bool Camera3OutputStream::isVideoStream() const {
763     uint64_t usage = 0;
764     status_t res = getEndpointUsage(&usage);
765     if (res != OK) {
766         ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
767         return false;
768     }
769 
770     return (usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) != 0;
771 }
772 
773 status_t Camera3OutputStream::setBufferManager(sp<Camera3BufferManager> bufferManager) {
774     Mutex::Autolock l(mLock);
775     if (mState != STATE_CONSTRUCTED) {
776         ALOGE("%s: this method can only be called when stream in CONSTRUCTED state.",
777                 __FUNCTION__);
778         return INVALID_OPERATION;
779     }
780     mBufferManager = bufferManager;
781 
782     return OK;
783 }
784 
785 status_t Camera3OutputStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
786             const std::vector<OutputStreamInfo> &/*outputInfo*/,
787             const std::vector<size_t> &/*removedSurfaceIds*/,
788             KeyedVector<sp<Surface>, size_t> * /*outputMapo*/) {
789     ALOGE("%s: this method is not supported!", __FUNCTION__);
790     return INVALID_OPERATION;
791 }
792 
793 void Camera3OutputStream::BufferReleasedListener::onBufferReleased() {
794     sp<Camera3OutputStream> stream = mParent.promote();
795     if (stream == nullptr) {
796         ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
797         return;
798     }
799 
800     Mutex::Autolock l(stream->mLock);
801     if (!(stream->mUseBufferManager)) {
802         return;
803     }
804 
805     ALOGV("Stream %d: Buffer released", stream->getId());
806     bool shouldFreeBuffer = false;
807     status_t res = stream->mBufferManager->onBufferReleased(
808         stream->getId(), stream->getStreamSetId(), &shouldFreeBuffer);
809     if (res != OK) {
810         ALOGE("%s: signaling buffer release to buffer manager failed: %s (%d).", __FUNCTION__,
811                 strerror(-res), res);
812         stream->mState = STATE_ERROR;
813     }
814 
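    // If the buffer manager reports that this stream no longer needs the released buffer,
    // detach it from the surface and let it be freed once the local reference goes out of scope.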
815     if (shouldFreeBuffer) {
816         sp<GraphicBuffer> buffer;
817         // Detach and free a buffer (when buffer goes out of scope)
818         stream->detachBufferLocked(&buffer, /*fenceFd*/ nullptr);
819         if (buffer.get() != nullptr) {
820             stream->mBufferManager->notifyBufferRemoved(
821                     stream->getId(), stream->getStreamSetId());
822         }
823     }
824 }
825 
826 void Camera3OutputStream::onBuffersRemovedLocked(
827         const std::vector<sp<GraphicBuffer>>& removedBuffers) {
828     sp<Camera3StreamBufferFreedListener> callback = mBufferFreedListener.promote();
829     if (callback != nullptr) {
830         for (const auto& gb : removedBuffers) {
831             callback->onBufferFreed(mId, gb->handle);
832         }
833     }
834 }
835 
836 status_t Camera3OutputStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
837     Mutex::Autolock l(mLock);
838     return detachBufferLocked(buffer, fenceFd);
839 }
840 
841 status_t Camera3OutputStream::detachBufferLocked(sp<GraphicBuffer>* buffer, int* fenceFd) {
842     ALOGV("Stream %d: detachBuffer", getId());
843     if (buffer == nullptr) {
844         return BAD_VALUE;
845     }
846 
847     sp<Fence> fence;
848     status_t res = mConsumer->detachNextBuffer(buffer, &fence);
849     if (res == NO_MEMORY) {
850         // This may rarely happen, and indicates that the released buffer was freed by another
851         // call (e.g., attachBuffer, dequeueBuffer, etc.) before reaching here. We should notify
852         // the buffer manager that this buffer has been freed. It's not fatal, but should be
853         // avoided, therefore log a warning.
854         *buffer = 0;
855         ALOGW("%s: the released buffer has already been freed by the buffer queue!", __FUNCTION__);
856     } else if (res != OK) {
857         // Treat other errors as abandonment
858         if (shouldLogError(res, mState)) {
859             ALOGE("%s: detach next buffer failed: %s (%d).", __FUNCTION__, strerror(-res), res);
860         }
861         mState = STATE_ABANDONED;
862         return res;
863     }
864 
865     if (fenceFd != nullptr) {
866         if (fence!= 0 && fence->isValid()) {
867             *fenceFd = fence->dup();
868         } else {
869             *fenceFd = -1;
870         }
871     }
872 
873     // Here we assume detachBuffer is called by the buffer manager, so it doesn't need to be notified.
874     checkRemovedBuffersLocked(/*notifyBufferManager*/false);
875     return res;
876 }
877 
878 status_t Camera3OutputStream::dropBuffers(bool dropping) {
879     Mutex::Autolock l(mLock);
880     mDropBuffers = dropping;
881     return OK;
882 }
883 
884 const String8& Camera3OutputStream::getPhysicalCameraId() const {
885     Mutex::Autolock l(mLock);
886     return physicalCameraId();
887 }
888 
889 status_t Camera3OutputStream::notifyBufferReleased(ANativeWindowBuffer* /*anwBuffer*/) {
890     return OK;
891 }
892 
893 bool Camera3OutputStream::isConsumerConfigurationDeferred(size_t surface_id) const {
894     Mutex::Autolock l(mLock);
895 
896     if (surface_id != 0) {
897         ALOGE("%s: surface_id %zu for Camera3OutputStream should be 0!", __FUNCTION__, surface_id);
898     }
899     return mConsumer == nullptr;
900 }
901 
902 status_t Camera3OutputStream::setConsumers(const std::vector<sp<Surface>>& consumers) {
903     Mutex::Autolock l(mLock);
904     if (consumers.size() != 1) {
905         ALOGE("%s: it's illegal to set %zu consumer surfaces!",
906                   __FUNCTION__, consumers.size());
907         return INVALID_OPERATION;
908     }
909     if (consumers[0] == nullptr) {
910         ALOGE("%s: it's illegal to set null consumer surface!", __FUNCTION__);
911         return INVALID_OPERATION;
912     }
913 
914     if (mConsumer != nullptr) {
915         ALOGE("%s: consumer surface was already set!", __FUNCTION__);
916         return INVALID_OPERATION;
917     }
918 
919     mConsumer = consumers[0];
920     return OK;
921 }
922 
923 bool Camera3OutputStream::isConsumedByHWComposer() const {
924     uint64_t usage = 0;
925     status_t res = getEndpointUsage(&usage);
926     if (res != OK) {
927         ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
928         return false;
929     }
930 
931     return (usage & GRALLOC_USAGE_HW_COMPOSER) != 0;
932 }
933 
934 bool Camera3OutputStream::isConsumedByHWTexture() const {
935     uint64_t usage = 0;
936     status_t res = getEndpointUsage(&usage);
937     if (res != OK) {
938         ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
939         return false;
940     }
941 
942     return (usage & GRALLOC_USAGE_HW_TEXTURE) != 0;
943 }
944 
945 }; // namespace camera3
946 
947 }; // namespace android
948