/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "WebmFrameThread"

#include "WebmConstants.h"
#include "WebmFrameThread.h"

#include <media/stagefright/MetaData.h>
#include <media/stagefright/foundation/ADebug.h>

#include <utils/Log.h>
#include <inttypes.h>

using namespace webm;

namespace android {

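// Static pthread entry point: forwards the opaque thread argument back to the
// owning WebmFrameThread and invokes its run() loop.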
void *WebmFrameThread::wrap(void *arg) {
    WebmFrameThread *worker = reinterpret_cast<WebmFrameThread*>(arg);
    worker->run();
    return NULL;
}

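// Spawns the worker as a joinable pthread running wrap(); on failure the
// thread handle is cleared and the pthread_create() error code is returned.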
status_t WebmFrameThread::start() {
    status_t err = OK;
    pthread_attr_t attr;
    pthread_attr_init(&attr);
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
    if ((err = pthread_create(&mThread, &attr, WebmFrameThread::wrap, this))) {
        mThread = 0;
    }
    pthread_attr_destroy(&attr);
    return err;
}

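// Joins the worker thread, if one was started, and returns its exit status
// (OK when run() returns normally).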
status_t WebmFrameThread::stop() {
    void *status = nullptr;
    if (mThread) {
        pthread_join(mThread, &status);
        mThread = 0;
    }
    return (status_t)(intptr_t)status;
}

//=================================================================================================

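// A source thread produces frames of a single type (audio or video) and pushes
// them into the blocking queue drained by the sink thread.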
WebmFrameSourceThread::WebmFrameSourceThread(
    int type,
    LinkedBlockingQueue<const sp<WebmFrame> >& sink)
    : mType(type), mSink(sink) {
}

//=================================================================================================

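// The sink thread muxes frames from the video and audio queues into the output
// file. It can be constructed either from the two source threads (borrowing
// their sink queues) or directly from the queues themselves.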
WebmFrameSinkThread::WebmFrameSinkThread(
        const int& fd,
        const uint64_t& off,
        sp<WebmFrameSourceThread> videoThread,
        sp<WebmFrameSourceThread> audioThread,
        List<sp<WebmElement> >& cues)
    : mFd(fd),
      mSegmentDataStart(off),
      mVideoFrames(videoThread->mSink),
      mAudioFrames(audioThread->mSink),
      mCues(cues),
      mDone(true) {
}

WebmFrameSinkThread::WebmFrameSinkThread(
        const int& fd,
        const uint64_t& off,
        LinkedBlockingQueue<const sp<WebmFrame> >& videoSource,
        LinkedBlockingQueue<const sp<WebmFrame> >& audioSource,
        List<sp<WebmElement> >& cues)
    : mFd(fd),
      mSegmentDataStart(off),
      mVideoFrames(videoSource),
      mAudioFrames(audioSource),
      mCues(cues),
      mDone(true) {
}

// Initializes a webm cluster with its starting timecode.
//
// frames:
//   sequence of input audio/video frames received from the source.
//
// clusterTimecodeL:
//   the starting timecode of the cluster; this is the timecode of the first
//   frame since frames are ordered by timestamp.
//
// children:
//   list to hold child elements in a webm cluster (start timecode and
//   simple blocks).
//
// static
void WebmFrameSinkThread::initCluster(
    List<const sp<WebmFrame> >& frames,
    uint64_t& clusterTimecodeL,
    List<sp<WebmElement> >& children) {
    CHECK(!frames.empty() && children.empty());

    const sp<WebmFrame> f = *(frames.begin());
    clusterTimecodeL = f->mAbsTimecode;
    WebmUnsigned *clusterTimecode = new WebmUnsigned(kMkvTimecode, clusterTimecodeL);
    children.clear();
    children.push_back(clusterTimecode);
}

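// Wraps the accumulated children in a kMkvCluster master element, writes it to
// the output file descriptor, and clears the list for the next cluster.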
void WebmFrameSinkThread::writeCluster(List<sp<WebmElement> >& children) {
    // children must contain at least one simpleblock and its timecode
    CHECK_GE(children.size(), 2);

    uint64_t size;
    sp<WebmElement> cluster = new WebmMaster(kMkvCluster, children);
    cluster->write(mFd, size);
    children.clear();
}

// Write out (possibly multiple) webm cluster(s) from frames split on video key frames.
//
// last:
//   current flush is triggered by EOS instead of a second outstanding video key frame.
void WebmFrameSinkThread::flushFrames(List<const sp<WebmFrame> >& frames, bool last) {
    if (frames.empty()) {
        return;
    }

    uint64_t clusterTimecodeL;
    List<sp<WebmElement> > children;
    initCluster(frames, clusterTimecodeL, children);

    uint64_t cueTime = clusterTimecodeL;
    off_t fpos = ::lseek(mFd, 0, SEEK_CUR);
    size_t n = frames.size();
    if (!last) {
        // If we are not flushing the last sequence of outstanding frames, flushFrames
        // must have been called right after we pushed a second outstanding video key
        // frame (the last frame), which belongs to the next cluster; also hold back on
        // flushing the second-to-last frame until we check its type. An audio frame
        // should precede the aforementioned video key frame in the next sequence; a video
        // frame should be the last frame in the current (to-be-flushed) sequence.
        CHECK_GE(n, 2);
        n -= 2;
    }

    for (size_t i = 0; i < n; i++) {
        const sp<WebmFrame> f = *(frames.begin());
        if (f->mType == kVideoType && f->mKey) {
            cueTime = f->mAbsTimecode;
        }

        if (f->mAbsTimecode - clusterTimecodeL > INT16_MAX) {
            writeCluster(children);
            initCluster(frames, clusterTimecodeL, children);
        }

        frames.erase(frames.begin());
        children.push_back(f->SimpleBlock(clusterTimecodeL));
    }

    // equivalent to last==false
    if (!frames.empty()) {
        // decide whether to write out the second-to-last frame.
        const sp<WebmFrame> secondLastFrame = *(frames.begin());
        if (secondLastFrame->mType == kVideoType) {
            frames.erase(frames.begin());
            children.push_back(secondLastFrame->SimpleBlock(clusterTimecodeL));
        }
    }

    writeCluster(children);
    sp<WebmElement> cuePoint = WebmElement::CuePointEntry(cueTime, 1, fpos - mSegmentDataStart);
    mCues.push_back(cuePoint);
}

status_t WebmFrameSinkThread::start() {
    mDone = false;
    return WebmFrameThread::start();
}

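// Pushes EOS frames into both queues so a run() loop blocked waiting for input
// can wake up, then joins the worker thread.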
status_t WebmFrameSinkThread::stop() {
    mDone = true;
    mVideoFrames.push(WebmFrame::EOS);
    mAudioFrames.push(WebmFrame::EOS);
    return WebmFrameThread::stop();
}

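// Muxing loop: peek at the head of both queues, take whichever frame sorts
// earlier, and accumulate frames until a second outstanding video key frame
// (or EOS on both tracks) triggers a cluster flush.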
void WebmFrameSinkThread::run() {
    int numVideoKeyFrames = 0;
    List<const sp<WebmFrame> > outstandingFrames;
    while (!mDone) {
        ALOGV("wait v frame");
        const sp<WebmFrame> videoFrame = mVideoFrames.peek();
        ALOGV("v frame: %p", videoFrame.get());

        ALOGV("wait a frame");
        const sp<WebmFrame> audioFrame = mAudioFrames.peek();
        ALOGV("a frame: %p", audioFrame.get());

        if (videoFrame->mEos && audioFrame->mEos) {
            break;
        }

        if (*audioFrame < *videoFrame) {
            ALOGV("take a frame");
            mAudioFrames.take();
            outstandingFrames.push_back(audioFrame);
        } else {
            ALOGV("take v frame");
            mVideoFrames.take();
            outstandingFrames.push_back(videoFrame);
            if (videoFrame->mKey)
                numVideoKeyFrames++;
        }

        if (numVideoKeyFrames == 2) {
            flushFrames(outstandingFrames, /* last = */ false);
            numVideoKeyFrames--;
        }
    }
    ALOGV("flushing last cluster (size %zu)", outstandingFrames.size());
    flushFrames(outstandingFrames, /* last = */ true);
    mDone = true;
}

//=================================================================================================

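// Default initial delay (700 ms) added to the start time when the client does
// not provide a start time offset; see the constructor below.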
static const int64_t kInitialDelayTimeUs = 700000LL;

void WebmFrameMediaSourceThread::clearFlags() {
    mDone = false;
    mPaused = false;
    mResumed = false;
    mStarted = false;
    mReachedEOS = false;
}

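// Pulls encoded buffers from an IMediaSource and converts them into WebmFrames
// for the sink queue. For real-time recordings with more than one track, the
// start time is pushed back by an offset to help align the audio and video
// streams.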
WebmFrameMediaSourceThread::WebmFrameMediaSourceThread(
        const sp<IMediaSource>& source,
        int type,
        LinkedBlockingQueue<const sp<WebmFrame> >& sink,
        uint64_t timeCodeScale,
        int64_t startTimeRealUs,
        int32_t startTimeOffsetMs,
        int numTracks,
        bool realTimeRecording)
    : WebmFrameSourceThread(type, sink),
      mSource(source),
      mTimeCodeScale(timeCodeScale),
      mTrackDurationUs(0) {
    clearFlags();
    mStartTimeUs = startTimeRealUs;
    if (realTimeRecording && numTracks > 1) {
        /*
         * Copied from MPEG4Writer
         *
         * This extra delay of accepting incoming audio/video signals
         * helps to align a/v start time at the beginning of a recording
         * session, and it also helps eliminate the "recording" sound for
         * camcorder applications.
         *
         * If the client does not set the start time offset, we fall back
         * to the default initial delay value.
         */
        int64_t startTimeOffsetUs = startTimeOffsetMs * 1000LL;
        if (startTimeOffsetUs < 0) {  // Start time offset was not set
            startTimeOffsetUs = kInitialDelayTimeUs;
        }
        mStartTimeUs += startTimeOffsetUs;
        ALOGI("Start time offset: %" PRId64 " us", startTimeOffsetUs);
    }
}

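// Hands the (possibly offset) start time to the media source and launches the
// reader thread; on failure the thread is marked done and at EOS.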
status_t WebmFrameMediaSourceThread::start() {
    sp<MetaData> meta = new MetaData;
    meta->setInt64(kKeyTime, mStartTimeUs);
    status_t err = mSource->start(meta.get());
    if (err != OK) {
        mDone = true;
        mReachedEOS = true;
        return err;
    } else {
        mStarted = true;
        return WebmFrameThread::start();
    }
}

status_t WebmFrameMediaSourceThread::resume() {
    if (!mDone && mPaused) {
        mPaused = false;
        mResumed = true;
    }
    return OK;
}

status_t WebmFrameMediaSourceThread::pause() {
    if (mStarted) {
        mPaused = true;
    }
    return OK;
}

status_t WebmFrameMediaSourceThread::stop() {
    if (mStarted) {
        mStarted = false;
        mDone = true;
        mSource->stop();
        return WebmFrameThread::stop();
    }
    return OK;
}

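// Reader loop: pulls encoded buffers from the source, rebases timestamps
// against the first sample and any paused intervals, converts them to the webm
// timecode scale, and pushes the resulting WebmFrames into the sink until EOS
// or stop().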
void WebmFrameMediaSourceThread::run() {
    int32_t count = 0;
    int64_t timestampUs = 0xdeadbeef;
    int64_t lastTimestampUs = 0; // Previous sample time stamp
    int64_t lastDurationUs = 0; // Previous sample duration
    int64_t previousPausedDurationUs = 0;

    const uint64_t kUninitialized = 0xffffffffffffffffL;
    mStartTimeUs = kUninitialized;

    status_t err = OK;
    MediaBuffer *buffer;
    while (!mDone && (err = mSource->read(&buffer, NULL)) == OK) {
        if (buffer->range_length() == 0) {
            buffer->release();
            buffer = NULL;
            continue;
        }

        sp<MetaData> md = buffer->meta_data();
        CHECK(md->findInt64(kKeyTime, &timestampUs));
        if (mStartTimeUs == kUninitialized) {
            mStartTimeUs = timestampUs;
        }
        timestampUs -= mStartTimeUs;

        if (mPaused && !mResumed) {
            lastDurationUs = timestampUs - lastTimestampUs;
            lastTimestampUs = timestampUs;
            buffer->release();
            buffer = NULL;
            continue;
        }
        ++count;

        // adjust time-stamps after pause/resume
        if (mResumed) {
            int64_t durExcludingEarlierPausesUs = timestampUs - previousPausedDurationUs;
            CHECK_GE(durExcludingEarlierPausesUs, 0ll);
            int64_t pausedDurationUs = durExcludingEarlierPausesUs - mTrackDurationUs;
            CHECK_GE(pausedDurationUs, lastDurationUs);
            previousPausedDurationUs += pausedDurationUs - lastDurationUs;
            mResumed = false;
        }
        timestampUs -= previousPausedDurationUs;
        CHECK_GE(timestampUs, 0ll);

        int32_t isSync = false;
        md->findInt32(kKeyIsSyncFrame, &isSync);
        const sp<WebmFrame> f = new WebmFrame(
            mType,
            isSync,
            timestampUs * 1000 / mTimeCodeScale,
            buffer);
        mSink.push(f);

        ALOGV(
            "%s %s frame at %" PRId64 " size %zu\n",
            mType == kVideoType ? "video" : "audio",
            isSync ? "I" : "P",
            timestampUs * 1000 / mTimeCodeScale,
            buffer->range_length());

        buffer->release();
        buffer = NULL;

        if (timestampUs > mTrackDurationUs) {
            mTrackDurationUs = timestampUs;
        }
        lastDurationUs = timestampUs - lastTimestampUs;
        lastTimestampUs = timestampUs;
    }

    mTrackDurationUs += lastDurationUs;
    mSink.push(WebmFrame::EOS);
}
}  // namespace android