• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright 2015 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #ifndef MEDIA_SYNC_H
18 #define MEDIA_SYNC_H
19 
20 #include <com_android_graphics_libgui_flags.h>
21 
22 #if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
23 #include <gui/BufferItemConsumer.h>
24 #else
25 #include <gui/IConsumerListener.h>
26 #endif
27 #include <gui/IProducerListener.h>
28 
29 #include <media/AudioResamplerPublic.h>
30 #include <media/AVSyncSettings.h>
31 #include <media/stagefright/foundation/AHandler.h>
32 
33 #include <utils/Condition.h>
34 #include <utils/KeyedVector.h>
35 #include <utils/Mutex.h>
36 
37 namespace android {
38 
// Forward declarations: these types are only used by pointer/reference in
// this header, so full includes are avoided to keep the header lightweight.
class AudioTrack;
class BufferItem;
class Fence;
class GraphicBuffer;
#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
class IGraphicBufferConsumer;
#endif
class IGraphicBufferProducer;
struct MediaClock;
struct VideoFrameScheduler;
49 
50 // MediaSync manages media playback and its synchronization to a media clock
51 // source. It can be also used for video-only playback.
52 //
53 // For video playback, it requires an output surface and provides an input
54 // surface. It then controls the rendering of input buffers (buffer queued to
55 // the input surface) on the output surface to happen at the appropriate time.
56 //
57 // For audio playback, it requires an audio track and takes updates of
58 // information of rendered audio data to maintain media clock when audio track
59 // serves as media clock source. (TODO: move audio rendering from JAVA to
60 // native code).
61 //
62 // It can use the audio or video track as media clock source, as well as an
63 // external clock. (TODO: actually support external clock as media clock
64 // sources; use video track as media clock source for audio-and-video stream).
65 //
// In video-only mode, MediaSync will play back every video frame, even if
// a frame arrives late relative to its timestamp and the previous frame's.
68 //
69 // The client needs to configure surface (for output video rendering) and audio
70 // track (for querying information of audio rendering) for MediaSync.
71 //
72 // Then the client needs to obtain a surface from MediaSync and render video
73 // frames onto that surface. Internally, the MediaSync will receive those video
74 // frames and render them onto the output surface at the appropriate time.
75 //
76 // The client needs to call updateQueuedAudioData() immediately after it writes
77 // audio data to the audio track. Such information will be used to update media
78 // clock.
79 //
class MediaSync : public AHandler {
public:
    // Create an instance of MediaSync.
    static sp<MediaSync> create();

    // Called when MediaSync is used to render video. It should be called
    // before createInputSurface().
    status_t setSurface(const sp<IGraphicBufferProducer> &output);

    // Called when audio track is used as media clock source. It should be
    // called before updateQueuedAudioData().
    status_t setAudioTrack(const sp<AudioTrack> &audioTrack);

    // Create a surface for client to render video frames. This is the surface
    // on which the client should render video frames. Those video frames will
    // be internally directed to output surface for rendering at appropriate
    // time.
    status_t createInputSurface(sp<IGraphicBufferProducer> *outBufferProducer);

    // Update just-rendered audio data size and the presentation timestamp of
    // the first frame of that audio data. It should be called immediately
    // after the client writes audio data into AudioTrack.
    // This function assumes a continuous audio stream.
    // TODO: support gap or backwards updates.
    status_t updateQueuedAudioData(
            size_t sizeInBytes, int64_t presentationTimeUs);

    // Set the consumer name of the input queue.
    void setName(const AString &name);

    // Get the media clock used by the MediaSync so that the client can obtain
    // corresponding media time or real time via
    // MediaClock::getMediaTime() and MediaClock::getRealTimeFor().
    sp<const MediaClock> getMediaClock();

    // Flush mediasync
    void flush();

    // Set the video frame rate hint - this is used by the video FrameScheduler
    status_t setVideoFrameRateHint(float rate);

    // Get the video frame rate measurement from the FrameScheduler
    // returns -1 if there is no measurement
    float getVideoFrameRate();

    // Set the sync settings parameters.
    status_t setSyncSettings(const AVSyncSettings &syncSettings);

    // Gets the sync settings parameters.
    void getSyncSettings(AVSyncSettings *syncSettings /* nonnull */);

    // Sets the playback rate using playback settings.
    // This method can be called any time.
    status_t setPlaybackSettings(const AudioPlaybackRate &rate);

    // Gets the playback rate (playback settings parameters).
    void getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */);

    // Get the play time for pending audio frames in audio sink.
    status_t getPlayTimeForPendingAudioFrames(int64_t *outTimeUs);

protected:
    // From AHandler: dispatches messages (e.g. kWhatDrainVideo) posted to
    // this handler on mLooper.
    virtual void onMessageReceived(const sp<AMessage> &msg);

private:
    // Message ids handled in onMessageReceived().
    enum {
        kWhatDrainVideo = 'dVid',
    };

    // This is a thin wrapper class that lets us listen to
    // IConsumerListener::onFrameAvailable from mInput.
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
    class InputListener : public BufferItemConsumer::FrameAvailableListener {
#else
    class InputListener : public IConsumerListener, public IBinder::DeathRecipient {
#endif
      public:
        InputListener(const sp<MediaSync> &sync);
        virtual ~InputListener();

        // From FrameAvailableListener
        virtual void onFrameAvailable(const BufferItem&) override;

#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)

        // From IConsumerListener
        // We don't care about released buffers because we detach each buffer as
        // soon as we acquire it. See the comment for onBufferReleased below for
        // some clarifying notes about the name.
        virtual void onBuffersReleased() {}

        // From IConsumerListener
        // We don't care about sideband streams, since we won't relay them.
        virtual void onSidebandStreamChanged();

        // From IBinder::DeathRecipient
        virtual void binderDied(const wp<IBinder> &who);
#endif

      private:
        // Strong reference back to the owning MediaSync; callbacks forward to it.
        sp<MediaSync> mSync;
    };

    // This is a thin wrapper class that lets us listen to
    // IProducerListener::onBufferReleased from mOutput.
    class OutputListener : public BnProducerListener,
                           public IBinder::DeathRecipient {
    public:
        OutputListener(const sp<MediaSync> &sync, const sp<IGraphicBufferProducer> &output);
        virtual ~OutputListener();

        // From IProducerListener
        virtual void onBufferReleased();

        // From IBinder::DeathRecipient
        virtual void binderDied(const wp<IBinder> &who);

    private:
        sp<MediaSync> mSync;
        // The output producer this listener was registered on.
        sp<IGraphicBufferProducer> mOutput;
    };

    // mIsAbandoned is set to true when the input or output dies.
    // Once the MediaSync has been abandoned by one side, it will disconnect
    // from the other side and not attempt to communicate with it further.
    bool mIsAbandoned;

    // Protects the state below. Methods suffixed with "_l" expect the caller
    // to hold this lock (AOSP locking convention). "mutable" so that logically
    // const accessors can still lock it.
    mutable Mutex mMutex;
    // Signaled when the output releases a buffer, so that a blocked
    // onFrameAvailableFromInput() can resume (see the comments on
    // onFrameAvailableFromInput/onBufferReleasedByOutput below).
    Condition mReleaseCondition;
    // Count of buffers currently outstanding downstream; presumably bounded
    // by mMaxAcquiredBufferCount -- TODO confirm in the implementation.
    size_t mNumOutstandingBuffers;
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
    sp<BufferItemConsumer> mInput;
    sp<InputListener> mListener;  // listener for mInput, so the reference isn't dropped.
#else
    sp<IGraphicBufferConsumer> mInput;
#endif
    // Producer side of the client-provided output surface (see setSurface()).
    sp<IGraphicBufferProducer> mOutput;
    // Usage flags queried from the output; presumably propagated to the input
    // queue so producers allocate compatible buffers -- TODO confirm.
    int mUsageFlagsFromOutput;
    uint32_t mMaxAcquiredBufferCount; // max acquired buffer count
    bool mReturnPendingInputFrame;    // set while we are pending before acquiring an input frame

    // Audio media-clock-source state, maintained via setAudioTrack() and
    // updateQueuedAudioData().
    sp<AudioTrack> mAudioTrack;
    uint32_t mNativeSampleRateInHz;
    int64_t mNumFramesWritten;
    bool mHasAudio;

    // Video frames queued from the input, waiting to be rendered onto the
    // output at the appropriate time.
    int64_t mNextBufferItemMediaUs;
    List<BufferItem> mBufferItems;
    sp<VideoFrameScheduler> mFrameScheduler;

    // Keep track of buffers received from |mInput|. This is needed because
    // it's possible the consumer of |mOutput| could return a different
    // GraphicBuffer::handle (e.g., due to passing buffers through IPC),
    // and that could cause problem if the producer of |mInput| only
    // supports pre-registered buffers.
    KeyedVector<uint64_t, sp<GraphicBuffer> > mBuffersFromInput;

    // Keep track of buffers sent to |mOutput|. When a new output surface comes
    // in, those buffers will be returned to input and old output surface will
    // be disconnected immediately.
    KeyedVector<uint64_t, sp<GraphicBuffer> > mBuffersSentToOutput;

    // Looper on which this AHandler's messages are processed.
    sp<ALooper> mLooper;
    // Effective playback rate; 0.0 means stopped/paused (see updatePlaybackRate_l).
    float mPlaybackRate;

    AudioPlaybackRate mPlaybackSettings;
    AVSyncSettings mSyncSettings;

    // Clock used to map media time to real time (see getMediaClock()).
    sp<MediaClock> mMediaClock;

    MediaSync();

    // Must be accessed through RefBase
    virtual ~MediaSync();

    // Convert a media timestamp to a real (system) time, in microseconds.
    int64_t getRealTime(int64_t mediaTimeUs, int64_t nowUs);
    int64_t getDurationIfPlayedAtNativeSampleRate_l(int64_t numFrames);
    int64_t getPlayedOutAudioDurationMedia_l(int64_t nowUs);

    // Render any queued buffer items whose time has come (kWhatDrainVideo).
    void onDrainVideo_l();

    // This implements the onFrameAvailable callback from IConsumerListener.
    // It gets called from an InputListener.
    // During this callback, we detach the buffer from the input, and queue
    // it for rendering on the output. This call can block if there are too
    // many outstanding buffers. If it blocks, it will resume when
    // onBufferReleasedByOutput releases a buffer back to the input.
    void onFrameAvailableFromInput();

    // Send |bufferItem| to the output for rendering.
    void renderOneBufferItem_l(const BufferItem &bufferItem);

    // This implements the onBufferReleased callback from IProducerListener.
    // It gets called from an OutputListener.
    // During this callback, we detach the buffer from the output, and release
    // it to the input. A blocked onFrameAvailable call will be allowed to proceed.
    void onBufferReleasedByOutput(sp<IGraphicBufferProducer> &output);

    // Return |buffer| back to the input.
    void returnBufferToInput_l(const sp<GraphicBuffer> &buffer, const sp<Fence> &fence);

    // When this is called, the MediaSync disconnects from (i.e., abandons) its
    // input or output, and signals any waiting onFrameAvailable calls to wake
    // up. This must be called with mMutex locked.
    void onAbandoned_l(bool isInput);

    // Set the playback in a desired speed.
    // This method can be called any time.
    // |rate| is the ratio between desired speed and the normal one, and should
    // be non-negative. The meaning of rate values:
    // 1.0 -- normal playback
    // 0.0 -- stop or pause
    // larger than 1.0 -- faster than normal speed
    // between 0.0 and 1.0 -- slower than normal speed
    void updatePlaybackRate_l(float rate);

    // apply new sync settings
    void resync_l();

    // apply playback settings only - without resyncing or updating playback rate
    status_t setPlaybackSettings_l(const AudioPlaybackRate &rate);

    // helper: playing whenever the rate is nonzero. The exact float compare is
    // intentional -- 0.0 is assigned explicitly for stop/pause (see
    // updatePlaybackRate_l above), never computed.
    bool isPlaying() { return mPlaybackRate != 0.0; }

    DISALLOW_EVIL_CONSTRUCTORS(MediaSync);
};
307 
308 } // namespace android
309 
310 #endif
311