/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef GRAPHIC_BUFFER_SOURCE_H_

#define GRAPHIC_BUFFER_SOURCE_H_

#include <binder/Status.h>
#include <utils/RefBase.h>

#include <media/hardware/VideoAPI.h>
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/foundation/AHandlerReflector.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/bqhelper/ComponentWrapper.h>
#include <android/hardware/graphics/bufferqueue/1.0/IGraphicBufferProducer.h>
#include <android/hardware/graphics/bufferqueue/2.0/IGraphicBufferProducer.h>

namespace android {

struct FrameDropper;
class BufferItem;
class IGraphicBufferProducer;
class IGraphicBufferConsumer;
/*
 * This class is used to feed codecs from a Surface via BufferQueue or
 * HW producer.
 *
 * Instances of the class don't run on a dedicated thread.  Instead,
 * various events trigger data movement:
 *
 *  - Availability of a new frame of data from the BufferQueue (notified
 *    via the onFrameAvailable callback).
 *  - The return of a codec buffer.
 *  - Application signaling end-of-stream.
 *  - Transition to or from "executing" state.
 *
 * Frames of data (and, perhaps, the end-of-stream indication) can arrive
 * before the codec is in the "executing" state, so we need to queue
 * things up until we're ready to go.
 *
 * The GraphicBufferSource can be configured dynamically to discard frames
 * from the source:
 *
 * - if their timestamp is less than a start time
 * - if the source is suspended or stopped and the suspend/stop-time is reached
 * - if EOS was signaled
 * - if there is no encoder connected to it
 *
 * The source, furthermore, may choose not to encode (drop) frames:
 *
 * - to throttle the frame rate (keep it under a certain limit)
 *
 * Finally, the source may optionally hold onto the last non-discarded frame
 * (even if it was dropped) to reencode it after an interval if no further
 * frames are sent by the producer.
 */
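/*
 * Typical client usage, as an illustrative sketch only (not a normative contract;
 * "wrapper", "dataSpace", "bufferCount", "width", "height" and "usage" below are
 * placeholder values supplied by the caller):
 *
 *   sp<GraphicBufferSource> source = new GraphicBufferSource();
 *   if (source->initCheck() != OK) {
 *       // construction failed; bail out
 *   }
 *   source->configure(wrapper, dataSpace, bufferCount, width, height, usage);
 *   sp<IGraphicBufferProducer> producer = source->getIGraphicBufferProducer();
 *   // hand `producer` to the frame source (e.g. wrapped in a Surface) so it can queue buffers
 *   source->start();                   // component entered the running state
 *   // ... frames flow via onFrameAvailable() / onInputBufferEmptied() ...
 *   source->signalEndOfInputStream();  // after the last input frame
 *   source->stop();                    // component stopped; stop submitting buffers
 *   source->release();                 // component released; shutting down
 */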
class GraphicBufferSource : public RefBase {
public:
    GraphicBufferSource();

    virtual ~GraphicBufferSource();

    // We can't throw an exception if the constructor fails, so we just set
    // this and require that the caller test the value.
    status_t initCheck() const {
        return mInitCheck;
    }

    // Returns the handle to the producer side of the BufferQueue.  Buffers
    // queued on this will be received by GraphicBufferSource.
    sp<IGraphicBufferProducer> getIGraphicBufferProducer() const;

    // Returns the handle to the bufferqueue HAL (V1_0) producer side of the BufferQueue.
    // Buffers queued on this will be received by GraphicBufferSource.
    sp<::android::hardware::graphics::bufferqueue::V1_0::IGraphicBufferProducer>
        getHGraphicBufferProducer_V1_0() const;

    // Returns the handle to the bufferqueue HAL producer side of the BufferQueue.
    // Buffers queued on this will be received by GraphicBufferSource.
    sp<::android::hardware::graphics::bufferqueue::V2_0::IGraphicBufferProducer>
        getHGraphicBufferProducer() const;

    // This is called when the component transitions to the running state, which means
    // we can start handing it buffers.  If we already have buffers of data
    // sitting in the BufferQueue, this will send them to the codec.
    status_t start();

    // This is called when the component transitions to stopped, indicating that
    // the codec is meant to return all buffers back to the client for them
    // to be freed. Do NOT submit any more buffers to the component.
    status_t stop();

    // This is called when the component transitions to released, indicating that
    // we are shutting down.
    status_t release();

    // A "codec buffer", i.e. a buffer that can be used to pass data into
    // the encoder, has been allocated.  (This call does not call back into
    // the component.)
    status_t onInputBufferAdded(int32_t bufferId);

    // Called when the encoder is no longer using the buffer.  If we have a BQ
    // buffer available, fill it with a new frame of data; otherwise, just mark
    // it as available.
    status_t onInputBufferEmptied(int32_t bufferId, int fenceFd);

    // IGraphicBufferSource interface
    // ------------------------------

    // Configure the buffer source to be used with a component with the default
    // data space. (32-bit consumerUsage flag, for vendor partition
    // compatibility)
    [[deprecated("use configure() with a 64-bit consumerUsage flag instead")]]
    status_t configure(
        const sp<ComponentWrapper> &component,
        int32_t dataSpace,
        int32_t bufferCount,
        uint32_t frameWidth,
        uint32_t frameHeight,
        uint32_t consumerUsage);

    // Configure the buffer source to be used with a component with the default
    // data space. (64-bit consumerUsage flag)
    status_t configure(
        const sp<ComponentWrapper> &component,
        int32_t dataSpace,
        int32_t bufferCount,
        uint32_t frameWidth,
        uint32_t frameHeight,
        uint64_t consumerUsage);

    // This is called after the last input frame has been submitted or the buffer
    // timestamp is greater than or equal to stopTimeUs. We need to submit an empty
    // buffer with the EOS flag set.  If we don't have a codec buffer ready,
    // we just set the mEndOfStream flag.
    status_t signalEndOfInputStream();

    // If suspend is true, all incoming buffers (including those currently
    // in the BufferQueue) with timestamp larger than timeUs will be discarded
    // until the suspension is lifted. If suspend is false, all incoming buffers
    // (including those currently in the BufferQueue) with timestamp larger than
    // timeUs will be processed. timeUs uses the SYSTEM_TIME_MONOTONIC time base.
    status_t setSuspend(bool suspend, int64_t timeUs);

    // Specifies the interval after which we requeue the buffer previously
    // queued to the encoder. This is useful in the case of surface flinger
    // providing the input surface if the resulting encoded stream is to
    // be displayed "live". If we were not to push through the extra frame,
    // the decoder on the remote end would be unable to decode the latest frame.
    // This API must be called before transitioning the encoder to the "executing"
    // state, and once this behaviour is specified it cannot be reset.
    status_t setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs);

    // Sets the input buffer timestamp offset.
    // When set, the sample's timestamp will be adjusted by timeOffsetUs.
    status_t setTimeOffsetUs(int64_t timeOffsetUs);

    /*
     * Set the maximum frame rate on the source.
     *
     * When maxFps is a positive number, it indicates the maximum rate at which
     * the buffers from this source will be sent to the encoder. Excessive
     * frames will be dropped to meet the frame rate requirement.
     *
     * When maxFps is a negative number, any frame drop logic will be disabled
     * and all frames from this source will be sent to the encoder, even when
     * the timestamp goes backwards. Note that some components may still drop
     * out-of-order frames silently, so this usually has to be used in
     * conjunction with the OMXNodeInstance::setMaxPtsGapUs() workaround.
     *
     * When maxFps is 0, this call will fail with BAD_VALUE.
     */
    status_t setMaxFps(float maxFps);
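    // Illustrative example (assumed values, not part of the API contract): after
    // setMaxFps(30.0f), frames are throttled to at most roughly 30 per second, i.e. a
    // buffer arriving less than about 1s/30 ~= 33ms after the previously submitted one
    // may be dropped by the frame-rate throttling logic (see mFrameDropper below).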

    // Sets the time lapse (or slow motion) parameters.
    // When set, the sample's timestamp will be modified to match the playback frame
    // rate, and the capture timestamp will be modified to match the capture rate.
    status_t setTimeLapseConfig(double fps, double captureFps);

    // Sets the start time us (in system time), samples before which should
    // be dropped and not submitted to the encoder.
    status_t setStartTimeUs(int64_t startTimeUs);

    // Sets the stop time us (in system time), samples after which should be dropped
    // and not submitted to the encoder. timeUs uses the SYSTEM_TIME_MONOTONIC time base.
    status_t setStopTimeUs(int64_t stopTimeUs);

    // Gets the stop time offset in us. This is the time offset between the latest buffer
    // time and stopTimeUs. If the stop time is not set, INVALID_OPERATION will be returned.
    // If the return value is OK, *stopTimeOffsetUs will contain the valid offset. Otherwise,
    // *stopTimeOffsetUs will not be modified. A positive stopTimeOffsetUs means the buffer
    // time is larger than stopTimeUs.
    status_t getStopTimeOffsetUs(int64_t *stopTimeOffsetUs);

    // Sets the desired color aspects, e.g. to be used when the producer does not specify
    // a dataspace.
    status_t setColorAspects(int32_t aspectsPacked);

protected:

    // BufferQueue::ConsumerListener interface, called when a new frame of
    // data is available.  If we're executing and a codec buffer is
    // available, we acquire the buffer, copy the GraphicBuffer reference
    // into the codec buffer, and call Empty[This]Buffer.  If we're not yet
    // executing or there's no codec buffer available, we just increment
    // mNumFramesAvailable and return.
    void onFrameAvailable(const BufferItem& item);

    // BufferQueue::ConsumerListener interface, called when the client has
    // released one or more GraphicBuffers.  We clear out the appropriate
    // set of mBufferSlot entries.
    void onBuffersReleased();

    // BufferQueue::ConsumerListener interface, called when the client has
    // changed the sideband stream. GraphicBufferSource doesn't handle sideband
    // streams, so this is a no-op (and should never be called).
    void onSidebandStreamChanged();

private:
    // BQ::ConsumerListener interface
    // ------------------------------
    struct ConsumerProxy;
    sp<ConsumerProxy> mConsumerProxy;

    // Lock, covers all member variables.
    mutable Mutex mMutex;

    // Used to report constructor failure.
    status_t mInitCheck;

    // Graphic buffer reference objects
    // --------------------------------

    // These are used to keep a shared reference to GraphicBuffers and gralloc handles owned by the
    // GraphicBufferSource as well as to manage the cache slots. Separate references are owned by
    // the buffer cache (controlled by the buffer queue/buffer producer) and the codec.

    // When we get buffers from the producer (BQ), it designates them to be cached into specific
    // slots. Each slot owns a shared reference to the graphic buffer (we track these using
    // CachedBuffer) that is in that slot, but the producer controls the slots.
    struct CachedBuffer;

    // When we acquire a buffer, we must release it back to the producer once we (or the codec)
    // no longer use it (as long as the buffer is still in the cache slot). We use shared
    // AcquiredBuffer instances for this purpose - and we call releaseBuffer when the last
    // reference is relinquished.
    struct AcquiredBuffer;

    // We also need to keep some extra metadata (other than the buffer reference) for acquired
    // buffers. These are tracked in the VideoBuffer struct.
    struct VideoBuffer {
        std::shared_ptr<AcquiredBuffer> mBuffer;
        nsecs_t mTimestampNs;
        android_dataspace_t mDataspace;
    };

    // Cached and acquired buffers
    // --------------------------------

    typedef int slot_id;

    // Maps a slot to the cached buffer in that slot
    KeyedVector<slot_id, std::shared_ptr<CachedBuffer>> mBufferSlots;

    // Queue of buffers acquired in chronological order that are not yet submitted to the codec
    List<VideoBuffer> mAvailableBuffers;

    // Number of buffers that have been signaled by the producer that they are available, but
    // we've been unable to acquire them due to our max acquire count
    int32_t mNumAvailableUnacquiredBuffers;

    // Number of frames acquired from the consumer (debug only)
    // (as in acquireBuffer has been called, and releaseBuffer still needs to be called)
    int32_t mNumOutstandingAcquires;

    // Acquires a buffer from the BQ and stores it in |item| if successful.
    // \return OK on success, or an error on failure.
    status_t acquireBuffer_l(VideoBuffer *item);

    // Called when a buffer was acquired from the producer
    void onBufferAcquired_l(const VideoBuffer &buffer);

    // Marks the buffer at the slot as no longer cached, and accounts for the outstanding
    // acquire count. Returns true if the slot was populated; otherwise, false.
    bool discardBufferInSlot_l(slot_id i);

    // Marks the buffer at the slot index as no longer cached, and accounts for the outstanding
    // acquire count.
    void discardBufferAtSlotIndex_l(ssize_t bsi);

    // Releases all acquired and unacquired available buffers.
    // This method will return if it fails to acquire an unacquired available buffer, which will
    // leave mNumAvailableUnacquiredBuffers positive on return.
    void releaseAllAvailableBuffers_l();

    // Returns whether we have any available buffers (acquired or not-yet-acquired).
    bool haveAvailableBuffers_l() const {
        return !mAvailableBuffers.empty() || mNumAvailableUnacquiredBuffers > 0;
    }

    // Codec buffers
    // -------------

    // When we queue buffers to the encoder, we must hold the references to the graphic buffers
    // in those buffers - as the producer may free the slots.

    typedef int32_t codec_buffer_id;

    // Set of codec buffer IDs of buffers available to fill
    List<codec_buffer_id> mFreeCodecBuffers;

    // Maps codec buffer IDs to buffer info submitted to the codec. Used to keep a reference to
    // the graphic buffer.
    KeyedVector<codec_buffer_id, std::shared_ptr<AcquiredBuffer>> mSubmittedCodecBuffers;

    // Processes the next acquired frame. If there is no available codec buffer, it returns false
    // without any further action.
    //
    // Otherwise, it consumes the next acquired frame and determines if it needs to be discarded or
    // dropped. If neither is needed, it submits it to the codec. It also saves the latest
    // non-dropped frame and submits it for repeat encoding (if this is enabled).
    //
    // \require there must be an acquired frame (i.e. we're in the onFrameAvailable callback,
    // or we're in codecBufferEmptied and mNumFramesAvailable is nonzero).
    // \require the codec must be executing
    // \returns true if it acquired (and handled) the next frame; otherwise, false.
    bool fillCodecBuffer_l();

    // Calculates the media timestamp for |item| and, on success, submits the buffer to the codec,
    // while also keeping a reference to it in mSubmittedCodecBuffers.
    // Returns UNKNOWN_ERROR if the buffer was not submitted due to its timestamp. Otherwise,
    // it returns any submit success or error value returned by the codec.
    status_t submitBuffer_l(const VideoBuffer &item);

    // Submits an empty buffer with the EOS flag set, if there is an available codec buffer, and
    // sets the mEndOfStreamSent flag. Does nothing if there is no codec buffer available.
    void submitEndOfInputStream_l();

    // Set to true if we want to send end-of-stream after we run out of available frames from the
    // producer
    bool mEndOfStream;

    // Flag indicating that EOS was submitted to the encoder
    bool mEndOfStreamSent;

    // Dataspace for the last frame submitted to the codec
    android_dataspace mLastDataspace;

    // Default color aspects for this source
    int32_t mDefaultColorAspectsPacked;

    // Called when the data space of the input buffer changes
    void onDataspaceChanged_l(android_dataspace dataspace, android_pixel_format pixelFormat);

    // Pointer back to the component that created us.  We send buffers here.
    sp<ComponentWrapper> mComponent;

    // Set by start() / stop().
    bool mExecuting;

    bool mSuspended;

    // Returns true if this source is unconditionally discarding acquired buffers at the moment,
    // regardless of the metadata of those buffers.
    bool areWeDiscardingAvailableBuffers_l();

    int64_t mLastFrameTimestampUs;

    // Our BufferQueue interfaces. mProducer is passed to the producer through
    // getIGraphicBufferProducer, and mConsumer is used internally to retrieve
    // the buffers queued by the producer.
    sp<IGraphicBufferProducer> mProducer;
    sp<IGraphicBufferConsumer> mConsumer;

    // The time to stop sending buffers.
    int64_t mStopTimeUs;

    struct ActionItem {
        typedef enum {
            PAUSE,
            RESUME,
            STOP
        } ActionType;
        ActionType mAction;
        int64_t mActionTimeUs;
    };

    // Maintains the last action timestamp to ensure all the action timestamps are
    // monotonically increasing.
    int64_t mLastActionTimeUs;

    // An action queue that queues up all the actions sent to GraphicBufferSource.
    // A STOP action should only show up at the end of the list, as all the actions
    // after a STOP action will be discarded. mActionQueue is protected by mMutex.
    List<ActionItem> mActionQueue;

    ////
    friend struct AHandlerReflector<GraphicBufferSource>;

    enum {
        kWhatRepeatLastFrame,   ///< queue last frame for reencoding
    };
    enum {
        kRepeatLastFrameCount = 10,
    };

    int64_t mSkipFramesBeforeNs;

    sp<FrameDropper> mFrameDropper;

    sp<ALooper> mLooper;
    sp<AHandlerReflector<GraphicBufferSource> > mReflector;

    // Repeat last frame feature
    // -------------------------
    // configuration parameter: repeat interval for frame repeating (<0 if repeating is disabled)
    int64_t mFrameRepeatIntervalUs;

    // current frame repeat generation - used to cancel a pending frame repeat
    int32_t mRepeatLastFrameGeneration;

    // number of times to repeat the latest frame (0 = none)
    int32_t mOutstandingFrameRepeatCount;

    // The previous buffer should've been repeated but
    // no codec buffer was available at the time.
    bool mFrameRepeatBlockedOnCodecBuffer;

    // Holds a reference to the last acquired (and not discarded) frame for frame repeating
    VideoBuffer mLatestBuffer;

    // Queues the last frame for reencoding after the repeat interval.
    void queueFrameRepeat_l();

    // Saves |item| as the latest buffer and queues it for reencoding (repeat).
    void setLatestBuffer_l(const VideoBuffer &item);

    // Submits the last frame to the encoder and queues it for reencoding.
    // \return true if the buffer was submitted, false if it wasn't (e.g. the source is suspended,
    // or there is no available codec buffer)
    bool repeatLatestBuffer_l();

    // Time lapse / slow motion configuration
    // --------------------------------------

    // desired frame rate for encoding - value <= 0 if undefined
    double mFps;

    // desired frame rate for capture - value <= 0 if undefined
    double mCaptureFps;

    // Time lapse mode is enabled if the capture frame rate is defined and it is
    // smaller than half the encoding frame rate (if defined). In this mode,
    // frames that come in between the capture interval (the reciprocal of the
    // capture frame rate) are dropped and the encoding timestamp is adjusted to
    // match the desired encoding frame rate.
    //
    // Slow motion mode is enabled if both encoding and capture frame rates are
    // defined and the encoding frame rate is less than half the capture frame
    // rate. In this mode, the source is expected to produce frames with an even
    // timestamp interval (after rounding) with the configured capture fps.
    //
    // These modes must be configured by calling setTimeLapseConfig() before
    // using this source.
    //
    // Timestamp snapping for slow motion recording
    // ============================================
    //
    // When the slow motion mode is configured with setTimeLapseConfig(), the
    // property "debug.stagefright.snap_timestamps" will be checked. If the
    // value of the property is set to any value other than 1, mSnapTimestamps
    // will be set to false. Otherwise, mSnapTimestamps will be set to true.
    // (mSnapTimestamps will be false for time lapse recording regardless of the
    // value of the property.)
    //
    // If mSnapTimestamps is true, i.e., timestamp snapping is enabled, the
    // first source timestamp will be used as the source base time; afterwards,
    // the timestamp of each source frame will be snapped to the nearest
    // expected capture timestamp and scaled to match the configured encoding
    // frame rate.
    //
    // If timestamp snapping is disabled, the timestamp of source frames will
    // be scaled to match the ratio between the configured encoding frame rate
    // and the configured capture frame rate.
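    //
    // Worked example (illustrative numbers only, not taken from this file): with
    // setTimeLapseConfig(fps = 30, captureFps = 120), slow motion mode applies
    // (30 < 120 / 2). Source frames arriving roughly 1s/120 ~= 8333us apart are
    // re-stamped roughly 1s/30 ~= 33333us apart, i.e. capture intervals are scaled
    // by captureFps / fps = 4, so the recorded clip plays back at quarter speed.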

    // whether timestamps will be snapped
    bool mSnapTimestamps{true};

    // adjusted capture timestamp of the base frame
    int64_t mBaseCaptureUs;

    // adjusted encoding timestamp of the base frame
    int64_t mBaseFrameUs;

    // number of frames from the base time
    int64_t mFrameCount;

    // adjusted capture timestamp for previous frame (negative if there were
    // none)
    int64_t mPrevCaptureUs;

    // adjusted media timestamp for previous frame (negative if there were none)
    int64_t mPrevFrameUs;

    // desired offset between media time and capture time
    int64_t mInputBufferTimeOffsetUs;

    // Calculates and outputs the timestamp to use for a buffer with a specific buffer timestamp
    // |bufferTimeNs|. Returns false on failure (buffer too close or timestamp is moving
    // backwards). Otherwise, stores the media timestamp in |*codecTimeUs| and returns true.
    //
    // This method takes into account the start time offset and any time lapse or slow motion time
    // adjustment requests.
    bool calculateCodecTimestamp_l(nsecs_t bufferTimeNs, int64_t *codecTimeUs);

    void onMessageReceived(const sp<AMessage> &msg);

    DISALLOW_EVIL_CONSTRUCTORS(GraphicBufferSource);
};

}  // namespace android

#endif  // GRAPHIC_BUFFER_SOURCE_H_