• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright 2014, The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *     http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "MediaCodecSource"
19 #define DEBUG_DRIFT_TIME 0
20 
21 #include <inttypes.h>
22 
23 #include <gui/IGraphicBufferConsumer.h>
24 #include <gui/IGraphicBufferProducer.h>
25 #include <gui/Surface.h>
26 #include <media/ICrypto.h>
27 #include <media/stagefright/foundation/ABuffer.h>
28 #include <media/stagefright/foundation/ADebug.h>
29 #include <media/stagefright/foundation/ALooper.h>
30 #include <media/stagefright/foundation/AMessage.h>
31 #include <media/stagefright/MediaBuffer.h>
32 #include <media/stagefright/MediaCodec.h>
33 #include <media/stagefright/MediaCodecList.h>
34 #include <media/stagefright/MediaCodecSource.h>
35 #include <media/stagefright/MediaErrors.h>
36 #include <media/stagefright/MediaSource.h>
37 #include <media/stagefright/MetaData.h>
38 #include <media/stagefright/PersistentSurface.h>
39 #include <media/stagefright/Utils.h>
40 
41 namespace android {
42 
43 const int32_t kDefaultSwVideoEncoderFormat = HAL_PIXEL_FORMAT_YCbCr_420_888;
44 const int32_t kDefaultHwVideoEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
45 const int32_t kDefaultVideoEncoderDataSpace = HAL_DATASPACE_V0_BT709;
46 
const int kStopTimeoutUs = 300000; // allow 300 ms for shutting down encoder
48 
// Puller runs on its own looper thread and repeatedly reads buffers from a
// MediaSource, queueing them for the encoder. All source access happens on
// the puller's looper thread except interruptSource().
struct MediaCodecSource::Puller : public AHandler {
    Puller(const sp<MediaSource> &source);

    // Stops the source directly; called from outside the looper when the
    // looper thread may be blocked inside MediaSource::read().
    void interruptSource();
    // Starts the looper thread and, synchronously on it, the source.
    // |notify| is posted when a buffer is available (with "eos" at EOS).
    status_t start(const sp<MetaData> &meta, const sp<AMessage> &notify);
    // Marks pulling stopped and flushes queued buffers; may interrupt the
    // source if a read appears stuck.
    void stop();
    // Synchronously stops the source on the looper thread.
    void stopSource();
    void pause();
    void resume();

    // if queue is empty, return false and set *|buffer| to NULL. Otherwise, pop
    // buffer from front of the queue, place it into *|buffer| and return true.
    bool readBuffer(MediaBuffer **buffer);

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg);
    virtual ~Puller();

private:
    enum {
        kWhatStart = 'msta',
        kWhatStop,
        kWhatPull,
    };

    sp<MediaSource> mSource;
    sp<AMessage> mNotify;   // client notification target (buffer ready / EOS)
    sp<ALooper> mLooper;
    bool mIsAudio;          // true when the source MIME starts with "audio/"

    // State shared between the looper thread and client threads; always
    // accessed through the Mutexed<Queue> lock.
    struct Queue {
        Queue()
            : mReadPendingSince(0),
              mPaused(false),
              mPulling(false) { }
        int64_t mReadPendingSince;   // when the in-flight source read began; 0 if none
        bool mPaused;                // while true, pulled buffers are dropped
        bool mPulling;               // cleared by stop() to end the pull loop
        Vector<MediaBuffer *> mReadBuffers;

        // release every buffer still in the queue
        void flush();
        // if queue is empty, return false and set *|buffer| to NULL. Otherwise, pop
        // buffer from front of the queue, place it into *|buffer| and return true.
        bool readBuffer(MediaBuffer **buffer);
        // add a buffer to the back of the queue
        void pushBuffer(MediaBuffer *mbuf);
    };
    Mutexed<Queue> mQueue;

    status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
    void schedulePull();
    void handleEOS();

    DISALLOW_EVIL_CONSTRUCTORS(Puller);
};
102 
Puller(const sp<MediaSource> & source)103 MediaCodecSource::Puller::Puller(const sp<MediaSource> &source)
104     : mSource(source),
105       mLooper(new ALooper()),
106       mIsAudio(false)
107 {
108     sp<MetaData> meta = source->getFormat();
109     const char *mime;
110     CHECK(meta->findCString(kKeyMIMEType, &mime));
111 
112     mIsAudio = !strncasecmp(mime, "audio/", 6);
113 
114     mLooper->setName("pull_looper");
115 }
116 
MediaCodecSource::Puller::~Puller() {
    // Detach this handler before stopping the looper so no further
    // messages are delivered to the dying object.
    mLooper->unregisterHandler(id());
    mLooper->stop();
}
121 
// Append |mbuf| to the back of the queue; caller must hold the queue lock.
void MediaCodecSource::Puller::Queue::pushBuffer(MediaBuffer *mbuf) {
    mReadBuffers.push_back(mbuf);
}
125 
readBuffer(MediaBuffer ** mbuf)126 bool MediaCodecSource::Puller::Queue::readBuffer(MediaBuffer **mbuf) {
127     if (mReadBuffers.empty()) {
128         *mbuf = NULL;
129         return false;
130     }
131     *mbuf = *mReadBuffers.begin();
132     mReadBuffers.erase(mReadBuffers.begin());
133     return true;
134 }
135 
flush()136 void MediaCodecSource::Puller::Queue::flush() {
137     MediaBuffer *mbuf;
138     while (readBuffer(&mbuf)) {
139         // there are no null buffers in the queue
140         mbuf->release();
141     }
142 }
143 
// Thread-safe wrapper: pops one pulled buffer for the client under the
// queue lock.
bool MediaCodecSource::Puller::readBuffer(MediaBuffer **mbuf) {
    Mutexed<Queue>::Locked queue(mQueue);
    return queue->readBuffer(mbuf);
}
148 
postSynchronouslyAndReturnError(const sp<AMessage> & msg)149 status_t MediaCodecSource::Puller::postSynchronouslyAndReturnError(
150         const sp<AMessage> &msg) {
151     sp<AMessage> response;
152     status_t err = msg->postAndAwaitResponse(&response);
153 
154     if (err != OK) {
155         return err;
156     }
157 
158     if (!response->findInt32("err", &err)) {
159         err = OK;
160     }
161 
162     return err;
163 }
164 
// Spins up the looper thread, registers this handler on it, then starts
// the source synchronously on that thread via kWhatStart. |notify| will
// be posted whenever a buffer is queued (and with "eos" at end of stream).
status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta, const sp<AMessage> &notify) {
    ALOGV("puller (%s) start", mIsAudio ? "audio" : "video");
    mLooper->start(
            false /* runOnCallingThread */,
            false /* canCallJava */,
            PRIORITY_AUDIO);
    mLooper->registerHandler(this);
    mNotify = notify;

    sp<AMessage> msg = new AMessage(kWhatStart, this);
    msg->setObject("meta", meta);
    return postSynchronouslyAndReturnError(msg);
}
178 
// Requests the pull loop to end and discards already-pulled buffers.
// Does not wait for the looper; stopSource() performs the synchronous
// source stop later.
void MediaCodecSource::Puller::stop() {
    bool interrupt = false;
    {
        // mark stopping before actually reaching kWhatStop on the looper, so the pulling will
        // stop.
        Mutexed<Queue>::Locked queue(mQueue);
        queue->mPulling = false;
        // If a source read has been pending for over a second, the looper
        // thread is presumably blocked inside MediaSource::read(), so the
        // source must be interrupted from this thread.
        interrupt = queue->mReadPendingSince && (queue->mReadPendingSince < ALooper::GetNowUs() - 1000000);
        queue->flush(); // flush any unprocessed pulled buffers
    }

    if (interrupt) {
        // must happen outside the queue lock
        interruptSource();
    }
}
194 
void MediaCodecSource::Puller::interruptSource() {
    // call source->stop if read has been pending for over a second
    // We have to call this outside the looper as looper is pending on the read.
    mSource->stop();
}
200 
// Synchronously stops the source on the looper thread; the result is
// intentionally ignored (stop is best-effort at this point).
void MediaCodecSource::Puller::stopSource() {
    sp<AMessage> msg = new AMessage(kWhatStop, this);
    (void)postSynchronouslyAndReturnError(msg);
}
205 
// While paused, the pull loop keeps running but releases each buffer it
// reads instead of queueing it.
void MediaCodecSource::Puller::pause() {
    Mutexed<Queue>::Locked queue(mQueue);
    queue->mPaused = true;
}
210 
// Resume queueing pulled buffers after a pause().
void MediaCodecSource::Puller::resume() {
    Mutexed<Queue>::Locked queue(mQueue);
    queue->mPaused = false;
}
215 
// Queue the next kWhatPull iteration on the looper.
void MediaCodecSource::Puller::schedulePull() {
    (new AMessage(kWhatPull, this))->post();
}
219 
handleEOS()220 void MediaCodecSource::Puller::handleEOS() {
221     ALOGV("puller (%s) posting EOS", mIsAudio ? "audio" : "video");
222     sp<AMessage> msg = mNotify->dup();
223     msg->setInt32("eos", 1);
224     msg->post();
225 }
226 
// Looper-thread message handler: starts/stops the source and runs the
// self-re-posting pull loop (kWhatPull).
void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
        {
            sp<RefBase> obj;
            CHECK(msg->findObject("meta", &obj));

            {
                Mutexed<Queue>::Locked queue(mQueue);
                queue->mPulling = true;
            }

            status_t err = mSource->start(static_cast<MetaData *>(obj.get()));

            if (err == OK) {
                // kick off the first pull; later pulls are chained from
                // the kWhatPull handler itself
                schedulePull();
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);
            break;
        }

        case kWhatStop:
        {
            mSource->stop();

            sp<AMessage> response = new AMessage;
            response->setInt32("err", OK);

            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);
            break;
        }

        case kWhatPull:
        {
            Mutexed<Queue>::Locked queue(mQueue);
            // record when the (possibly blocking) read starts so stop()
            // can detect a stuck read and interrupt the source
            queue->mReadPendingSince = ALooper::GetNowUs();
            if (!queue->mPulling) {
                handleEOS();
                break;
            }

            // drop the lock across the potentially long source read
            queue.unlock();
            MediaBuffer *mbuf = NULL;
            status_t err = mSource->read(&mbuf);
            queue.lock();

            queue->mReadPendingSince = 0;
            // if we need to discard buffer
            if (!queue->mPulling || queue->mPaused || err != OK) {
                if (mbuf != NULL) {
                    mbuf->release();
                    mbuf = NULL;
                }
                if (queue->mPulling && err == OK) {
                    msg->post(); // if simply paused, keep pulling source
                    break;
                } else if (err == ERROR_END_OF_STREAM) {
                    ALOGV("stream ended, mbuf %p", mbuf);
                } else if (err != OK) {
                    ALOGE("error %d reading stream.", err);
                }
            }

            if (mbuf != NULL) {
                queue->pushBuffer(mbuf);
            }

            queue.unlock();

            if (mbuf != NULL) {
                // tell the client a buffer is ready, then chain the next
                // pull by re-posting this message
                mNotify->post();
                msg->post();
            } else {
                handleEOS();
            }
            break;
        }

        default:
            TRESPASS();
    }
}
317 
// Output state starts with no EOS observed and no error recorded.
MediaCodecSource::Output::Output()
    : mEncoderReachedEOS(false),
      mErrorCode(OK) {
}
322 
323 // static
Create(const sp<ALooper> & looper,const sp<AMessage> & format,const sp<MediaSource> & source,const sp<IGraphicBufferConsumer> & consumer,uint32_t flags)324 sp<MediaCodecSource> MediaCodecSource::Create(
325         const sp<ALooper> &looper,
326         const sp<AMessage> &format,
327         const sp<MediaSource> &source,
328         const sp<IGraphicBufferConsumer> &consumer,
329         uint32_t flags) {
330     sp<MediaCodecSource> mediaSource =
331             new MediaCodecSource(looper, format, source, consumer, flags);
332 
333     if (mediaSource->init() == OK) {
334         return mediaSource;
335     }
336     return NULL;
337 }
338 
// Sets an offset (us) applied to every input buffer timestamp; handled
// synchronously on the looper.
status_t MediaCodecSource::setInputBufferTimeOffset(int64_t timeOffsetUs) {
    sp<AMessage> msg = new AMessage(kWhatSetInputBufferTimeOffset, mReflector);
    msg->setInt64("time-offset-us", timeOffsetUs);
    return postSynchronouslyAndReturnError(msg);
}
344 
getFirstSampleSystemTimeUs()345 int64_t MediaCodecSource::getFirstSampleSystemTimeUs() {
346     sp<AMessage> msg = new AMessage(kWhatGetFirstSampleSystemTimeUs, mReflector);
347     sp<AMessage> response;
348     msg->postAndAwaitResponse(&response);
349     int64_t timeUs;
350     if (!response->findInt64("time-us", &timeUs)) {
351         timeUs = -1ll;
352     }
353     return timeUs;
354 }
355 
// Starts (or resumes) encoding; runs synchronously on the looper via
// kWhatStart. |params| may carry a start time (kKeyTime) and is optional.
status_t MediaCodecSource::start(MetaData* params) {
    sp<AMessage> msg = new AMessage(kWhatStart, mReflector);
    msg->setObject("meta", params);
    return postSynchronouslyAndReturnError(msg);
}
361 
// Stops encoding; blocks until the stop request has been processed.
status_t MediaCodecSource::stop() {
    sp<AMessage> msg = new AMessage(kWhatStop, mReflector);
    return postSynchronouslyAndReturnError(msg);
}
366 
// Requests a pause asynchronously (fire-and-forget); always returns OK.
status_t MediaCodecSource::pause() {
    (new AMessage(kWhatPause, mReflector))->post();
    return OK;
}
371 
// Returns the current output format as MetaData, under the meta lock.
sp<MetaData> MediaCodecSource::getFormat() {
    Mutexed<sp<MetaData>>::Locked meta(mMeta);
    return *meta;
}
376 
// Only valid in surface-input mode: returns the producer end of the
// encoder's input surface.
sp<IGraphicBufferProducer> MediaCodecSource::getGraphicBufferProducer() {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    return mGraphicBufferProducer;
}
381 
// Blocks until an encoded buffer is available or the encoder reached EOS.
// On success the caller owns *|buffer| (returned to us via
// signalBufferReturned); after EOS, returns the recorded error code.
status_t MediaCodecSource::read(
        MediaBuffer** buffer, const ReadOptions* /* options */) {
    Mutexed<Output>::Locked output(mOutput);

    *buffer = NULL;
    // wait for signalEOS() or a newly queued output buffer
    while (output->mBufferQueue.size() == 0 && !output->mEncoderReachedEOS) {
        output.waitForCondition(output->mCond);
    }
    if (!output->mEncoderReachedEOS) {
        *buffer = *output->mBufferQueue.begin();
        output->mBufferQueue.erase(output->mBufferQueue.begin());
        return OK;
    }
    return output->mErrorCode;
}
397 
// MediaBufferObserver callback: the consumer is done with the buffer;
// detach ourselves as observer and drop the reference.
void MediaCodecSource::signalBufferReturned(MediaBuffer *buffer) {
    buffer->setObserver(0);
    buffer->release();
}
402 
// Constructor only records configuration; the encoder itself is set up in
// init()/initEncoder() (called by Create()).
MediaCodecSource::MediaCodecSource(
        const sp<ALooper> &looper,
        const sp<AMessage> &outputFormat,
        const sp<MediaSource> &source,
        const sp<IGraphicBufferConsumer> &consumer,
        uint32_t flags)
    : mLooper(looper),
      mOutputFormat(outputFormat),
      mMeta(new MetaData),
      mFlags(flags),
      mIsVideo(false),
      mStarted(false),
      mStopping(false),
      mDoMoreWorkPending(false),
      mSetEncoderFormat(false),
      mEncoderFormat(0),
      mEncoderDataSpace(0),
      mGraphicBufferConsumer(consumer),
      mInputBufferTimeOffsetUs(0),
      mFirstSampleSystemTimeUs(-1ll),
      mPausePending(false),
      mFirstSampleTimeUs(-1ll),
      mGeneration(0) {
    CHECK(mLooper != NULL);

    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (!strncasecmp("video/", mime.c_str(), 6)) {
        mIsVideo = true;
    }

    // With surface input the codec pulls frames itself; a Puller is only
    // needed when buffers are read from a MediaSource.
    if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
        mPuller = new Puller(source);
    }
}
439 
MediaCodecSource::~MediaCodecSource() {
    releaseEncoder();

    // NOTE(review): assumes initEncoder() ran and set mCodecLooper and
    // mReflector (Create() always calls init()) — confirm no other path
    // constructs this object.
    mCodecLooper->stop();
    mLooper->unregisterHandler(mReflector->id());
}
446 
// One-shot initialization; on failure the encoder is released so the
// object can be destroyed safely.
status_t MediaCodecSource::init() {
    status_t err = initEncoder();

    if (err != OK) {
        releaseEncoder();
    }

    return err;
}
456 
// Creates and configures the encoder: picks a matching codec, configures
// it with mOutputFormat, sets up surface input if requested, derives the
// input pixel format/dataspace for buffer-input sources, and starts the
// codec. Returns the first fatal error encountered.
status_t MediaCodecSource::initEncoder() {
    // reflector routes AMessages delivered on mLooper back to this object
    mReflector = new AHandlerReflector<MediaCodecSource>(this);
    mLooper->registerHandler(mReflector);

    // the codec gets a dedicated looper for its callbacks
    mCodecLooper = new ALooper;
    mCodecLooper->setName("codec_looper");
    mCodecLooper->start();

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        // start with input suspended; resume() happens in onStart()
        mOutputFormat->setInt32("create-input-buffers-suspended", 1);
    }

    AString outputMIME;
    CHECK(mOutputFormat->findString("mime", &outputMIME));

    Vector<AString> matchingCodecs;
    MediaCodecList::findMatchingCodecs(
            outputMIME.c_str(), true /* encoder */,
            ((mFlags & FLAG_PREFER_SOFTWARE_CODEC) ? MediaCodecList::kPreferSoftwareCodecs : 0),
            &matchingCodecs);

    // try each candidate encoder until one configures successfully
    status_t err = NO_INIT;
    for (size_t ix = 0; ix < matchingCodecs.size(); ++ix) {
        mEncoder = MediaCodec::CreateByComponentName(
                mCodecLooper, matchingCodecs[ix]);

        if (mEncoder == NULL) {
            continue;
        }

        ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());

        mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, mReflector);
        mEncoder->setCallback(mEncoderActivityNotify);

        err = mEncoder->configure(
                    mOutputFormat,
                    NULL /* nativeWindow */,
                    NULL /* crypto */,
                    MediaCodec::CONFIGURE_FLAG_ENCODE);

        if (err == OK) {
            break;
        }
        mEncoder->release();
        mEncoder = NULL;
    }

    if (err != OK) {
        return err;
    }

    // publish the codec's actual output format as this source's format
    mEncoder->getOutputFormat(&mOutputFormat);
    sp<MetaData> meta = new MetaData;
    convertMessageToMetaData(mOutputFormat, meta);
    mMeta.lock().set(meta);

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        CHECK(mIsVideo);

        if (mGraphicBufferConsumer != NULL) {
            // When using persistent surface, we are only interested in the
            // consumer, but have to use PersistentSurface as a wrapper to
            // pass consumer over messages (similar to BufferProducerWrapper)
            err = mEncoder->setInputSurface(
                    new PersistentSurface(NULL, mGraphicBufferConsumer));
        } else {
            err = mEncoder->createInputSurface(&mGraphicBufferProducer);
        }

        if (err != OK) {
            return err;
        }
    }

    // derive the pixel format / dataspace a buffer-input source should
    // use when producing frames for this encoder
    sp<AMessage> inputFormat;
    int32_t usingSwReadOften;
    mSetEncoderFormat = false;
    if (mEncoder->getInputFormat(&inputFormat) == OK) {
        mSetEncoderFormat = true;
        if (inputFormat->findInt32("using-sw-read-often", &usingSwReadOften)
                && usingSwReadOften) {
            // this is a SW encoder; signal source to allocate SW readable buffers
            mEncoderFormat = kDefaultSwVideoEncoderFormat;
        } else {
            mEncoderFormat = kDefaultHwVideoEncoderFormat;
        }
        if (!inputFormat->findInt32("android._dataspace", &mEncoderDataSpace)) {
            mEncoderDataSpace = kDefaultVideoEncoderDataSpace;
        }
        ALOGV("setting dataspace %#x, format %#x", mEncoderDataSpace, mEncoderFormat);
    }

    err = mEncoder->start();

    if (err != OK) {
        return err;
    }

    // reset output-side state for a fresh encode session
    {
        Mutexed<Output>::Locked output(mOutput);
        output->mEncoderReachedEOS = false;
        output->mErrorCode = OK;
    }

    return OK;
}
564 
releaseEncoder()565 void MediaCodecSource::releaseEncoder() {
566     if (mEncoder == NULL) {
567         return;
568     }
569 
570     mEncoder->release();
571     mEncoder.clear();
572 }
573 
postSynchronouslyAndReturnError(const sp<AMessage> & msg)574 status_t MediaCodecSource::postSynchronouslyAndReturnError(
575         const sp<AMessage> &msg) {
576     sp<AMessage> response;
577     status_t err = msg->postAndAwaitResponse(&response);
578 
579     if (err != OK) {
580         return err;
581     }
582 
583     if (!response->findInt32("err", &err)) {
584         err = OK;
585     }
586 
587     return err;
588 }
589 
// Marks the encoder as having reached EOS (recording |err| for read()),
// releases queued output and the encoder, and — if a stop is in progress —
// stops the source and replies to every waiting stop() caller.
void MediaCodecSource::signalEOS(status_t err) {
    bool reachedEOS = false;
    {
        Mutexed<Output>::Locked output(mOutput);
        reachedEOS = output->mEncoderReachedEOS;
        if (!reachedEOS) {
            ALOGV("encoder (%s) reached EOS", mIsVideo ? "video" : "audio");
            // release all unread media buffers
            for (List<MediaBuffer*>::iterator it = output->mBufferQueue.begin();
                    it != output->mBufferQueue.end(); it++) {
                (*it)->release();
            }
            output->mBufferQueue.clear();
            output->mEncoderReachedEOS = true;
            output->mErrorCode = err;
            // wake any reader blocked in read()
            output->mCond.signal();

            reachedEOS = true;
            // drop the output lock before tearing down the encoder
            output.unlock();
            releaseEncoder();
        }
    }

    if (mStopping && reachedEOS) {
        ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
        mPuller->stopSource();
        ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
        // posting reply to everyone that's waiting
        List<sp<AReplyToken>>::iterator it;
        for (it = mStopReplyIDQueue.begin();
                it != mStopReplyIDQueue.end(); it++) {
            (new AMessage)->postReply(*it);
        }
        mStopReplyIDQueue.clear();
        mStopping = false;
        // invalidate any still-pending generation-tagged messages
        ++mGeneration;
    }
}
628 
// Surface-input only: ask the codec to drop incoming frames.
void MediaCodecSource::suspend() {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    if (mEncoder != NULL) {
        sp<AMessage> params = new AMessage;
        params->setInt32("drop-input-frames", true);
        mEncoder->setParameters(params);
    }
}
637 
// Surface-input only: stop dropping frames; when |skipFramesBeforeUs| > 0
// also tell the codec to skip frames stamped before that time.
void MediaCodecSource::resume(int64_t skipFramesBeforeUs) {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    if (mEncoder != NULL) {
        sp<AMessage> params = new AMessage;
        params->setInt32("drop-input-frames", false);
        if (skipFramesBeforeUs > 0) {
            params->setInt64("skip-frames-before", skipFramesBeforeUs);
        }
        mEncoder->setParameters(params);
    }
}
649 
// Moves as many pulled source buffers as possible into free encoder input
// slots. A NULL buffer from the puller signals EOS to the encoder.
status_t MediaCodecSource::feedEncoderInputBuffers() {
    MediaBuffer* mbuf = NULL;
    while (!mAvailEncoderInputIndices.empty() && mPuller->readBuffer(&mbuf)) {
        size_t bufferIndex = *mAvailEncoderInputIndices.begin();
        mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());

        int64_t timeUs = 0ll;
        uint32_t flags = 0;
        size_t size = 0;

        if (mbuf != NULL) {
            CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));
            if (mFirstSampleSystemTimeUs < 0ll) {
                mFirstSampleSystemTimeUs = systemTime() / 1000;
                if (mPausePending) {
                    // a pause arrived before the first sample; honor it
                    // now and return the input slot unused
                    mPausePending = false;
                    onPause();
                    mbuf->release();
                    mAvailEncoderInputIndices.push_back(bufferIndex);
                    return OK;
                }
            }

            timeUs += mInputBufferTimeOffsetUs;

            // push decoding time for video, or drift time for audio
            if (mIsVideo) {
                mDecodingTimeQueue.push_back(timeUs);
            } else {
#if DEBUG_DRIFT_TIME
                if (mFirstSampleTimeUs < 0ll) {
                    mFirstSampleTimeUs = timeUs;
                }
                int64_t driftTimeUs = 0;
                if (mbuf->meta_data()->findInt64(kKeyDriftTime, &driftTimeUs)
                        && driftTimeUs) {
                    driftTimeUs = timeUs - mFirstSampleTimeUs - driftTimeUs;
                }
                mDriftTimeQueue.push_back(driftTimeUs);
#endif // DEBUG_DRIFT_TIME
            }

            sp<ABuffer> inbuf;
            status_t err = mEncoder->getInputBuffer(bufferIndex, &inbuf);
            if (err != OK || inbuf == NULL) {
                // encoder input unavailable; treat as fatal for this stream
                mbuf->release();
                signalEOS();
                break;
            }

            size = mbuf->size();

            memcpy(inbuf->data(), mbuf->data(), size);

            if (mIsVideo) {
                // video encoder will release MediaBuffer when done
                // with underlying data.
                inbuf->setMediaBufferBase(mbuf);
            } else {
                mbuf->release();
            }
        } else {
            // NULL from the puller: the source reached end of stream
            flags = MediaCodec::BUFFER_FLAG_EOS;
        }

        status_t err = mEncoder->queueInputBuffer(
                bufferIndex, 0, size, timeUs, flags);

        if (err != OK) {
            return err;
        }
    }

    return OK;
}
725 
// Handles kWhatStart on the looper. If already started, this resumes
// (cancelling a pending pause, requesting an IDR frame for video);
// otherwise it starts input: unsuspend the codec (surface input) or
// start the Puller (buffer input).
status_t MediaCodecSource::onStart(MetaData *params) {
    if (mStopping) {
        ALOGE("Failed to start while we're stopping");
        return INVALID_OPERATION;
    }

    if (mStarted) {
        ALOGI("MediaCodecSource (%s) resuming", mIsVideo ? "video" : "audio");
        if (mPausePending) {
            // the pause never took effect; just cancel it
            mPausePending = false;
            return OK;
        }
        if (mIsVideo) {
            // force a sync frame at the resume point
            mEncoder->requestIDRFrame();
        }
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            resume();
        } else {
            CHECK(mPuller != NULL);
            mPuller->resume();
        }
        return OK;
    }

    ALOGI("MediaCodecSource (%s) starting", mIsVideo ? "video" : "audio");

    status_t err = OK;

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        int64_t startTimeUs;
        if (!params || !params->findInt64(kKeyTime, &startTimeUs)) {
            startTimeUs = -1ll;
        }
        resume(startTimeUs);
    } else {
        CHECK(mPuller != NULL);
        sp<MetaData> meta = params;
        if (mSetEncoderFormat) {
            // pass the encoder's preferred pixel format / dataspace to the
            // source so it allocates compatible buffers
            if (meta == NULL) {
                meta = new MetaData;
            }
            meta->setInt32(kKeyPixelFormat, mEncoderFormat);
            meta->setInt32(kKeyColorSpace, mEncoderDataSpace);
        }

        sp<AMessage> notify = new AMessage(kWhatPullerNotify, mReflector);
        err = mPuller->start(meta.get(), notify);
        if (err != OK) {
            return err;
        }
    }

    ALOGI("MediaCodecSource (%s) started", mIsVideo ? "video" : "audio");

    mStarted = true;
    return OK;
}
783 
// Pauses input: suspends the codec's surface input, or pauses the Puller.
void MediaCodecSource::onPause() {
    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        suspend();
    } else {
        CHECK(mPuller != NULL);
        mPuller->pause();
    }
}
792 
onMessageReceived(const sp<AMessage> & msg)793 void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
794     switch (msg->what()) {
795     case kWhatPullerNotify:
796     {
797         int32_t eos = 0;
798         if (msg->findInt32("eos", &eos) && eos) {
799             ALOGV("puller (%s) reached EOS", mIsVideo ? "video" : "audio");
800             signalEOS();
801             break;
802         }
803 
804         if (mEncoder == NULL) {
805             ALOGV("got msg '%s' after encoder shutdown.", msg->debugString().c_str());
806             break;
807         }
808 
809         feedEncoderInputBuffers();
810         break;
811     }
812     case kWhatEncoderActivity:
813     {
814         if (mEncoder == NULL) {
815             break;
816         }
817 
818         int32_t cbID;
819         CHECK(msg->findInt32("callbackID", &cbID));
820         if (cbID == MediaCodec::CB_INPUT_AVAILABLE) {
821             int32_t index;
822             CHECK(msg->findInt32("index", &index));
823 
824             mAvailEncoderInputIndices.push_back(index);
825             feedEncoderInputBuffers();
826         } else if (cbID == MediaCodec::CB_OUTPUT_FORMAT_CHANGED) {
827             status_t err = mEncoder->getOutputFormat(&mOutputFormat);
828             if (err != OK) {
829                 signalEOS(err);
830                 break;
831             }
832             sp<MetaData> meta = new MetaData;
833             convertMessageToMetaData(mOutputFormat, meta);
834             mMeta.lock().set(meta);
835         } else if (cbID == MediaCodec::CB_OUTPUT_AVAILABLE) {
836             int32_t index;
837             size_t offset;
838             size_t size;
839             int64_t timeUs;
840             int32_t flags;
841 
842             CHECK(msg->findInt32("index", &index));
843             CHECK(msg->findSize("offset", &offset));
844             CHECK(msg->findSize("size", &size));
845             CHECK(msg->findInt64("timeUs", &timeUs));
846             CHECK(msg->findInt32("flags", &flags));
847 
848             if (flags & MediaCodec::BUFFER_FLAG_EOS) {
849                 mEncoder->releaseOutputBuffer(index);
850                 signalEOS();
851                 break;
852             }
853 
854             sp<ABuffer> outbuf;
855             status_t err = mEncoder->getOutputBuffer(index, &outbuf);
856             if (err != OK || outbuf == NULL) {
857                 signalEOS();
858                 break;
859             }
860 
861             MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
862             mbuf->add_ref();
863 
864             if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
865                 if (mIsVideo) {
866                     int64_t decodingTimeUs;
867                     if (mFlags & FLAG_USE_SURFACE_INPUT) {
                        // NOTE(review): this span begins mid-way through the
                        // output-buffer path of the encoder callback; the
                        // enclosing branches open above this view.
                        // First output sample seen: record its arrival
                        // wall-clock time in microseconds.
                        if (mFirstSampleSystemTimeUs < 0ll) {
                            mFirstSampleSystemTimeUs = systemTime() / 1000;
                            if (mPausePending) {
                                // A pause was requested before any sample was
                                // produced; apply it now and drop this buffer.
                                mPausePending = false;
                                onPause();
                                mbuf->release();
                                break;
                            }
                        }
                        // Timestamp offset is already adjusted in GraphicBufferSource.
                        // GraphicBufferSource is supposed to discard samples
                        // queued before start, and offset timeUs by start time
                        CHECK_GE(timeUs, 0ll);
                        // TODO:
                        // Decoding time for surface source is unavailable,
                        // use presentation time for now. May need to move
                        // this logic into MediaCodec.
                        decodingTimeUs = timeUs;
                    } else {
                        // Non-surface video input: pop the decoding time that
                        // was queued when the matching input was submitted.
                        CHECK(!mDecodingTimeQueue.empty());
                        decodingTimeUs = *(mDecodingTimeQueue.begin());
                        mDecodingTimeQueue.erase(mDecodingTimeQueue.begin());
                    }
                    mbuf->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs);

                    ALOGV("[video] time %" PRId64 " us (%.2f secs), dts/pts diff %" PRId64,
                            timeUs, timeUs / 1E6, decodingTimeUs - timeUs);
                } else {
                    // Audio path: drift time bookkeeping is compiled in only
                    // when DEBUG_DRIFT_TIME is enabled (see top of file).
                    int64_t driftTimeUs = 0;
#if DEBUG_DRIFT_TIME
                    CHECK(!mDriftTimeQueue.empty());
                    driftTimeUs = *(mDriftTimeQueue.begin());
                    mDriftTimeQueue.erase(mDriftTimeQueue.begin());
                    mbuf->meta_data()->setInt64(kKeyDriftTime, driftTimeUs);
#endif // DEBUG_DRIFT_TIME
                    ALOGV("[audio] time %" PRId64 " us (%.2f secs), drift %" PRId64,
                            timeUs, timeUs / 1E6, driftTimeUs);
                }
                mbuf->meta_data()->setInt64(kKeyTime, timeUs);
            } else {
                // No valid timestamp: this is codec-config data (CSD).
                mbuf->meta_data()->setInt32(kKeyIsCodecConfig, true);
            }
            if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
                mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true);
            }
            // Copy the encoded payload into mbuf (presumably allocated from
            // outbuf's size above this view -- confirm at the allocation site).
            memcpy(mbuf->data(), outbuf->data(), outbuf->size());
            mbuf->setObserver(this);

            {
                // Publish the buffer and wake any reader blocked on mCond.
                Mutexed<Output>::Locked output(mOutput);
                output->mBufferQueue.push_back(mbuf);
                output->mCond.signal();
            }

            // Hand the output buffer back to the codec for reuse.
            mEncoder->releaseOutputBuffer(index);
       } else if (cbID == MediaCodec::CB_ERROR) {
            // Asynchronous codec error: log it and wind down via EOS signaling.
            status_t err;
            CHECK(msg->findInt32("err", &err));
            ALOGE("Encoder (%s) reported error : 0x%x",
                    mIsVideo ? "video" : "audio", err);
            signalEOS();
       }
       break;
    }
932     case kWhatStart:
933     {
934         sp<AReplyToken> replyID;
935         CHECK(msg->senderAwaitsResponse(&replyID));
936 
937         sp<RefBase> obj;
938         CHECK(msg->findObject("meta", &obj));
939         MetaData *params = static_cast<MetaData *>(obj.get());
940 
941         sp<AMessage> response = new AMessage;
942         response->setInt32("err", onStart(params));
943         response->postReply(replyID);
944         break;
945     }
    case kWhatStop:
    {
        // Synchronous stop request. Multiple concurrent stop() callers are
        // supported: each replyID is queued and all are answered once the
        // encoder actually reaches EOS.
        ALOGI("encoder (%s) stopping", mIsVideo ? "video" : "audio");

        sp<AReplyToken> replyID;
        CHECK(msg->senderAwaitsResponse(&replyID));

        if (mOutput.lock()->mEncoderReachedEOS) {
            // if we already reached EOS, reply and return now
            ALOGI("encoder (%s) already stopped",
                    mIsVideo ? "video" : "audio");
            (new AMessage)->postReply(replyID);
            break;
        }

        // Queue this caller's reply token; replies are posted when stopped.
        mStopReplyIDQueue.push_back(replyID);
        if (mStopping) {
            // nothing to do if we're already stopping, reply will be posted
            // to all when we're stopped.
            break;
        }

        mStopping = true;

        // if using surface, signal source EOS and wait for EOS to come back.
        // otherwise, stop puller (which also clears the input buffer queue)
        // and wait for the EOS message. We cannot call source->stop() because
        // the encoder may still be processing input buffers.
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            mEncoder->signalEndOfInputStream();
        } else {
            mPuller->stop();
        }

        // complete stop even if encoder/puller stalled
        // (kWhatStopStalled carries mGeneration so a stale timeout from an
        // earlier session is ignored there).
        sp<AMessage> timeoutMsg = new AMessage(kWhatStopStalled, mReflector);
        timeoutMsg->setInt32("generation", mGeneration);
        timeoutMsg->post(kStopTimeoutUs);
        break;
    }
986 
987     case kWhatStopStalled:
988     {
989         int32_t generation;
990         CHECK(msg->findInt32("generation", &generation));
991         if (generation != mGeneration) {
992              break;
993         }
994 
995         if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
996             ALOGV("source (%s) stopping", mIsVideo ? "video" : "audio");
997             mPuller->interruptSource();
998             ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
999         }
1000         signalEOS();
1001     }
1002 
1003     case kWhatPause:
1004     {
1005         if (mFirstSampleSystemTimeUs < 0) {
1006             mPausePending = true;
1007         } else {
1008             onPause();
1009         }
1010         break;
1011     }
1012     case kWhatSetInputBufferTimeOffset:
1013     {
1014         sp<AReplyToken> replyID;
1015         CHECK(msg->senderAwaitsResponse(&replyID));
1016         status_t err = OK;
1017         CHECK(msg->findInt64("time-offset-us", &mInputBufferTimeOffsetUs));
1018 
1019         // Propagate the timestamp offset to GraphicBufferSource.
1020         if (mIsVideo) {
1021             sp<AMessage> params = new AMessage;
1022             params->setInt64("time-offset-us", mInputBufferTimeOffsetUs);
1023             err = mEncoder->setParameters(params);
1024         }
1025 
1026         sp<AMessage> response = new AMessage;
1027         response->setInt32("err", err);
1028         response->postReply(replyID);
1029         break;
1030     }
1031     case kWhatGetFirstSampleSystemTimeUs:
1032     {
1033         sp<AReplyToken> replyID;
1034         CHECK(msg->senderAwaitsResponse(&replyID));
1035 
1036         sp<AMessage> response = new AMessage;
1037         response->setInt64("time-us", mFirstSampleSystemTimeUs);
1038         response->postReply(replyID);
1039         break;
1040     }
    default:
        // No other message types are expected on this handler; TRESPASS()
        // is a fatal assertion (foundation/ADebug.h).
        TRESPASS();
    }
}
1045 
1046 } // namespace android
1047