/*
 * Copyright 2014, The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "MediaCodecSource"
#define DEBUG_DRIFT_TIME 0

#include <inttypes.h>

#include <gui/IGraphicBufferProducer.h>
#include <gui/Surface.h>
#include <mediadrm/ICrypto.h>
#include <media/MediaBufferHolder.h>
#include <media/MediaCodecBuffer.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/ColorUtils.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaCodecSource.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>

namespace android {

const int32_t kDefaultSwVideoEncoderFormat = HAL_PIXEL_FORMAT_YCbCr_420_888;
const int32_t kDefaultHwVideoEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
const int32_t kDefaultVideoEncoderDataSpace = HAL_DATASPACE_V0_BT709;

const int kStopTimeoutUs = 300000; // allow 300 ms for shutting down the encoder
// Allow at most 1 sec for the stop time offset. This limits the delay in the
// input source.
const int kMaxStopTimeOffsetUs = 1000000;

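// Puller wraps the raw MediaSource (e.g. camera or audio record) behind its
// own looper ("pull_looper"): it repeatedly read()s buffers from the source
// and queues them until the encoder can consume them, so a slow or blocking
// source read never stalls the encoder's looper.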
struct MediaCodecSource::Puller : public AHandler {
    explicit Puller(const sp<MediaSource> &source);

    void interruptSource();
    status_t start(const sp<MetaData> &meta, const sp<AMessage> &notify);
    void stop();
    void stopSource();
    void pause();
    void resume();
    status_t setStopTimeUs(int64_t stopTimeUs);
    bool readBuffer(MediaBufferBase **buffer);

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg);
    virtual ~Puller();

private:
    enum {
        kWhatStart = 'msta',
        kWhatStop,
        kWhatPull,
        kWhatSetStopTimeUs,
    };

    sp<MediaSource> mSource;
    sp<AMessage> mNotify;
    sp<ALooper> mLooper;
    bool mIsAudio;

    struct Queue {
        Queue()
            : mReadPendingSince(0),
              mPaused(false),
              mPulling(false) { }
        int64_t mReadPendingSince;
        bool mPaused;
        bool mPulling;
        Vector<MediaBufferBase *> mReadBuffers;

        void flush();
        // If the queue is empty, return false and set *buffer to NULL.
        // Otherwise, pop the buffer at the front of the queue, place it in
        // *buffer and return true.
        bool readBuffer(MediaBufferBase **buffer);
        // Add a buffer to the back of the queue.
        void pushBuffer(MediaBufferBase *mbuf);
    };
    Mutexed<Queue> mQueue;

    status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
    void schedulePull();
    void handleEOS();

    DISALLOW_EVIL_CONSTRUCTORS(Puller);
};

MediaCodecSource::Puller::Puller(const sp<MediaSource> &source)
    : mSource(source),
      mLooper(new ALooper()),
      mIsAudio(false)
{
    sp<MetaData> meta = source->getFormat();
    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    mIsAudio = !strncasecmp(mime, "audio/", 6);

    mLooper->setName("pull_looper");
}

MediaCodecSource::Puller::~Puller() {
    mLooper->unregisterHandler(id());
    mLooper->stop();
}

void MediaCodecSource::Puller::Queue::pushBuffer(MediaBufferBase *mbuf) {
    mReadBuffers.push_back(mbuf);
}

bool MediaCodecSource::Puller::Queue::readBuffer(MediaBufferBase **mbuf) {
    if (mReadBuffers.empty()) {
        *mbuf = NULL;
        return false;
    }
    *mbuf = *mReadBuffers.begin();
    mReadBuffers.erase(mReadBuffers.begin());
    return true;
}

void MediaCodecSource::Puller::Queue::flush() {
    MediaBufferBase *mbuf;
    while (readBuffer(&mbuf)) {
        // there are no null buffers in the queue
        mbuf->release();
    }
}

bool MediaCodecSource::Puller::readBuffer(MediaBufferBase **mbuf) {
    Mutexed<Queue>::Locked queue(mQueue);
    return queue->readBuffer(mbuf);
}

status_t MediaCodecSource::Puller::postSynchronouslyAndReturnError(
        const sp<AMessage> &msg) {
    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);

    if (err != OK) {
        return err;
    }

    if (!response->findInt32("err", &err)) {
        err = OK;
    }

    return err;
}

status_t MediaCodecSource::Puller::setStopTimeUs(int64_t stopTimeUs) {
    return mSource->setStopTimeUs(stopTimeUs);
}

status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta, const sp<AMessage> &notify) {
    ALOGV("puller (%s) start", mIsAudio ? "audio" : "video");
    mLooper->start(
            false /* runOnCallingThread */,
            false /* canCallJava */,
            PRIORITY_AUDIO);
    mLooper->registerHandler(this);
    mNotify = notify;

    sp<AMessage> msg = new AMessage(kWhatStart, this);
    msg->setObject("meta", meta);
    return postSynchronouslyAndReturnError(msg);
}

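// Shutdown happens in three steps, driven by MediaCodecSource: stop() flips
// mPulling and drops already-pulled buffers, stopSource() runs source->stop()
// synchronously on the looper, and interruptSource() calls source->stop()
// from the caller's thread for the case where the looper is still blocked
// inside a long source read.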
void MediaCodecSource::Puller::stop() {
    // mark stopping before actually reaching kWhatStop on the looper, so the pulling will
    // stop.
    Mutexed<Queue>::Locked queue(mQueue);
    queue->mPulling = false;
    queue->flush(); // flush any unprocessed pulled buffers
}

void MediaCodecSource::Puller::interruptSource() {
    // call source->stop if read has been pending for over a second
    // We have to call this outside the looper as looper is pending on the read.
    mSource->stop();
}

void MediaCodecSource::Puller::stopSource() {
    sp<AMessage> msg = new AMessage(kWhatStop, this);
    (void)postSynchronouslyAndReturnError(msg);
}

void MediaCodecSource::Puller::pause() {
    Mutexed<Queue>::Locked queue(mQueue);
    queue->mPaused = true;
}

void MediaCodecSource::Puller::resume() {
    Mutexed<Queue>::Locked queue(mQueue);
    queue->flush();
    queue->mPaused = false;
}

void MediaCodecSource::Puller::schedulePull() {
    (new AMessage(kWhatPull, this))->post();
}

void MediaCodecSource::Puller::handleEOS() {
    ALOGV("puller (%s) posting EOS", mIsAudio ? "audio" : "video");
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("eos", 1);
    msg->post();
}

void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
        {
            sp<RefBase> obj;
            CHECK(msg->findObject("meta", &obj));

            {
                Mutexed<Queue>::Locked queue(mQueue);
                queue->mPulling = true;
            }

            status_t err = mSource->start(static_cast<MetaData *>(obj.get()));

            if (err == OK) {
                schedulePull();
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);
            break;
        }

        case kWhatSetStopTimeUs:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            int64_t stopTimeUs;
            CHECK(msg->findInt64("stop-time-us", &stopTimeUs));
            status_t err = mSource->setStopTimeUs(stopTimeUs);

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }

        case kWhatStop:
        {
            mSource->stop();

            sp<AMessage> response = new AMessage;
            response->setInt32("err", OK);

            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);
            break;
        }

        case kWhatPull:
        {
            Mutexed<Queue>::Locked queue(mQueue);
            queue->mReadPendingSince = ALooper::GetNowUs();
            if (!queue->mPulling) {
                handleEOS();
                break;
            }

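            // Drop the lock around the (potentially long-blocking) source
            // read so that stop(), pause() and resume() can still take mQueue
            // and update the flags re-checked below.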
            queue.unlock();
            MediaBufferBase *mbuf = NULL;
            status_t err = mSource->read(&mbuf);
            queue.lock();

            queue->mReadPendingSince = 0;
            // if we need to discard buffer
            if (!queue->mPulling || queue->mPaused || err != OK) {
                if (mbuf != NULL) {
                    mbuf->release();
                    mbuf = NULL;
                }
                if (queue->mPulling && err == OK) {
                    msg->post(); // if simply paused, keep pulling source
                    break;
                } else if (err == ERROR_END_OF_STREAM) {
                    ALOGV("stream ended, mbuf %p", mbuf);
                } else if (err != OK) {
                    ALOGE("error %d reading stream.", err);
                }
            }

            if (mbuf != NULL) {
                queue->pushBuffer(mbuf);
            }

            queue.unlock();

            if (mbuf != NULL) {
                mNotify->post();
                msg->post();
            } else {
                handleEOS();
            }
            break;
        }

        default:
            TRESPASS();
    }
}

MediaCodecSource::Output::Output()
    : mEncoderReachedEOS(false),
      mErrorCode(OK) {
}

// static
sp<MediaCodecSource> MediaCodecSource::Create(
        const sp<ALooper> &looper,
        const sp<AMessage> &format,
        const sp<MediaSource> &source,
        const sp<PersistentSurface> &persistentSurface,
        uint32_t flags) {
    sp<MediaCodecSource> mediaSource = new MediaCodecSource(
            looper, format, source, persistentSurface, flags);

    if (mediaSource->init() == OK) {
        return mediaSource;
    }
    return NULL;
}

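// A minimal usage sketch (illustrative only; `looper` and `cameraSource` are
// assumed to exist, and a real video format needs more keys, e.g.
// "color-format", "frame-rate" and "i-frame-interval"):
//
//   sp<AMessage> format = new AMessage;
//   format->setString("mime", "video/avc");
//   format->setInt32("width", 1280);
//   format->setInt32("height", 720);
//   format->setInt32("bitrate", 2000000);
//   sp<MediaCodecSource> enc = MediaCodecSource::Create(
//           looper, format, cameraSource);
//   if (enc != NULL && enc->start() == OK) {
//       MediaBufferBase *buf;
//       while (enc->read(&buf) == OK) {  // blocks until encoded data or EOS
//           // ... consume buf->data() / buf->size() ...
//           buf->release();
//       }
//       enc->stop();
//   }
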
status_t MediaCodecSource::setInputBufferTimeOffset(int64_t timeOffsetUs) {
    sp<AMessage> msg = new AMessage(kWhatSetInputBufferTimeOffset, mReflector);
    msg->setInt64(PARAMETER_KEY_OFFSET_TIME, timeOffsetUs);
    return postSynchronouslyAndReturnError(msg);
}

int64_t MediaCodecSource::getFirstSampleSystemTimeUs() {
    sp<AMessage> msg = new AMessage(kWhatGetFirstSampleSystemTimeUs, mReflector);
    sp<AMessage> response;
    msg->postAndAwaitResponse(&response);
    int64_t timeUs;
    if (!response->findInt64("time-us", &timeUs)) {
        timeUs = -1LL;
    }
    return timeUs;
}

status_t MediaCodecSource::start(MetaData* params) {
    sp<AMessage> msg = new AMessage(kWhatStart, mReflector);
    msg->setObject("meta", params);
    return postSynchronouslyAndReturnError(msg);
}

status_t MediaCodecSource::stop() {
    sp<AMessage> msg = new AMessage(kWhatStop, mReflector);
    return postSynchronouslyAndReturnError(msg);
}

status_t MediaCodecSource::setStopTimeUs(int64_t stopTimeUs) {
    sp<AMessage> msg = new AMessage(kWhatSetStopTimeUs, mReflector);
    msg->setInt64("stop-time-us", stopTimeUs);
    return postSynchronouslyAndReturnError(msg);
}

status_t MediaCodecSource::pause(MetaData* params) {
    sp<AMessage> msg = new AMessage(kWhatPause, mReflector);
    msg->setObject("meta", params);
    msg->post();
    return OK;
}

sp<MetaData> MediaCodecSource::getFormat() {
    Mutexed<sp<MetaData>>::Locked meta(mMeta);
    return *meta;
}

sp<IGraphicBufferProducer> MediaCodecSource::getGraphicBufferProducer() {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    return mGraphicBufferProducer;
}

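// MediaSource::read() for the downstream consumer (e.g. the muxer): blocks
// until an encoded buffer is queued by the codec callback, or until the
// encoder reaches EOS or an error, in which case the error code is returned.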
status_t MediaCodecSource::read(
        MediaBufferBase** buffer, const ReadOptions* /* options */) {
    Mutexed<Output>::Locked output(mOutput);

    *buffer = NULL;
    while (output->mBufferQueue.size() == 0 && !output->mEncoderReachedEOS) {
        output.waitForCondition(output->mCond);
    }
    if (!output->mEncoderReachedEOS) {
        *buffer = *output->mBufferQueue.begin();
        output->mBufferQueue.erase(output->mBufferQueue.begin());
        return OK;
    }
    return output->mErrorCode;
}

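// MediaBufferObserver callback: buffers handed out by read() land here once
// the consumer releases its reference; detach the observer and free them.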
void MediaCodecSource::signalBufferReturned(MediaBufferBase *buffer) {
    buffer->setObserver(0);
    buffer->release();
}

status_t MediaCodecSource::setEncodingBitrate(int32_t bitRate) {
    ALOGV("setEncodingBitrate (%d)", bitRate);

    if (mEncoder == NULL) {
        ALOGW("setEncodingBitrate (%d) : mEncoder is null", bitRate);
        return BAD_VALUE;
    }

    sp<AMessage> params = new AMessage;
    params->setInt32("video-bitrate", bitRate);

    return mEncoder->setParameters(params);
}

status_t MediaCodecSource::requestIDRFrame() {
    if (mEncoder == NULL) {
        ALOGW("requestIDRFrame : mEncoder is null");
        return BAD_VALUE;
    } else {
        mEncoder->requestIDRFrame();
        return OK;
    }
}

MediaCodecSource::MediaCodecSource(
        const sp<ALooper> &looper,
        const sp<AMessage> &outputFormat,
        const sp<MediaSource> &source,
        const sp<PersistentSurface> &persistentSurface,
        uint32_t flags)
    : mLooper(looper),
      mOutputFormat(outputFormat),
      mMeta(new MetaData),
      mFlags(flags),
      mIsVideo(false),
      mStarted(false),
      mStopping(false),
      mDoMoreWorkPending(false),
      mSetEncoderFormat(false),
      mEncoderFormat(0),
      mEncoderDataSpace(0),
      mPersistentSurface(persistentSurface),
      mInputBufferTimeOffsetUs(0),
      mFirstSampleSystemTimeUs(-1LL),
      mPausePending(false),
      mFirstSampleTimeUs(-1LL),
      mGeneration(0) {
    CHECK(mLooper != NULL);

    if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
        mPuller = new Puller(source);
    }
}

MediaCodecSource::~MediaCodecSource() {
    releaseEncoder();

    mCodecLooper->stop();
    mLooper->unregisterHandler(mReflector->id());
}

status_t MediaCodecSource::init() {
    status_t err = initEncoder();

    if (err != OK) {
        releaseEncoder();
    }

    return err;
}

status_t MediaCodecSource::initEncoder() {
    mReflector = new AHandlerReflector<MediaCodecSource>(this);
    mLooper->registerHandler(mReflector);

    mCodecLooper = new ALooper;
    mCodecLooper->setName("codec_looper");
    mCodecLooper->start();

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        mOutputFormat->setInt32(KEY_CREATE_INPUT_SURFACE_SUSPENDED, 1);
    }
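    // With surface input, the codec is created with its input surface
    // suspended; onStart() later clears PARAMETER_KEY_SUSPEND, so frames
    // delivered to the surface before start() are not encoded.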

    AString outputMIME;
    CHECK(mOutputFormat->findString("mime", &outputMIME));
    mIsVideo = outputMIME.startsWithIgnoreCase("video/");

    AString name;
    status_t err = NO_INIT;
    if (mOutputFormat->findString("testing-name", &name)) {
        mEncoder = MediaCodec::CreateByComponentName(mCodecLooper, name);

        mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, mReflector);
        mEncoder->setCallback(mEncoderActivityNotify);

        err = mEncoder->configure(
                mOutputFormat,
                NULL /* nativeWindow */,
                NULL /* crypto */,
                MediaCodec::CONFIGURE_FLAG_ENCODE);
    } else {
        Vector<AString> matchingCodecs;
        MediaCodecList::findMatchingCodecs(
                outputMIME.c_str(), true /* encoder */,
                ((mFlags & FLAG_PREFER_SOFTWARE_CODEC) ? MediaCodecList::kPreferSoftwareCodecs : 0),
                &matchingCodecs);

        for (size_t ix = 0; ix < matchingCodecs.size(); ++ix) {
            mEncoder = MediaCodec::CreateByComponentName(
                    mCodecLooper, matchingCodecs[ix]);

            if (mEncoder == NULL) {
                continue;
            }

            ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());

            mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, mReflector);
            mEncoder->setCallback(mEncoderActivityNotify);

            err = mEncoder->configure(
                    mOutputFormat,
                    NULL /* nativeWindow */,
                    NULL /* crypto */,
                    MediaCodec::CONFIGURE_FLAG_ENCODE);

            if (err == OK) {
                break;
            }
            mEncoder->release();
            mEncoder = NULL;
        }
    }

    if (err != OK) {
        return err;
    }

    mEncoder->getOutputFormat(&mOutputFormat);
    sp<MetaData> meta = new MetaData;
    convertMessageToMetaData(mOutputFormat, meta);
    mMeta.lock().set(meta);

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        CHECK(mIsVideo);

        if (mPersistentSurface != NULL) {
            // When using persistent surface, we are only interested in the
            // consumer, but have to use PersistentSurface as a wrapper to
            // pass consumer over messages (similar to BufferProducerWrapper)
            err = mEncoder->setInputSurface(mPersistentSurface);
        } else {
            err = mEncoder->createInputSurface(&mGraphicBufferProducer);
        }

        if (err != OK) {
            return err;
        }
    }

    sp<AMessage> inputFormat;
    int32_t usingSwReadOften;
    mSetEncoderFormat = false;
    if (mEncoder->getInputFormat(&inputFormat) == OK) {
        mSetEncoderFormat = true;
        if (inputFormat->findInt32("using-sw-read-often", &usingSwReadOften)
                && usingSwReadOften) {
            // this is a SW encoder; signal source to allocate SW readable buffers
            mEncoderFormat = kDefaultSwVideoEncoderFormat;
        } else {
            mEncoderFormat = kDefaultHwVideoEncoderFormat;
        }
        if (!inputFormat->findInt32("android._dataspace", &mEncoderDataSpace)) {
            mEncoderDataSpace = kDefaultVideoEncoderDataSpace;
        }
        ALOGV("setting dataspace %#x, format %#x", mEncoderDataSpace, mEncoderFormat);
    }

    err = mEncoder->start();

    if (err != OK) {
        return err;
    }

    {
        Mutexed<Output>::Locked output(mOutput);
        output->mEncoderReachedEOS = false;
        output->mErrorCode = OK;
    }

    return OK;
}

void MediaCodecSource::releaseEncoder() {
    if (mEncoder == NULL) {
        return;
    }

    mEncoder->release();
    mEncoder.clear();
}

status_t MediaCodecSource::postSynchronouslyAndReturnError(
        const sp<AMessage> &msg) {
    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);

    if (err != OK) {
        return err;
    }

    if (!response->findInt32("err", &err)) {
        err = OK;
    }

    return err;
}

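// signalEOS() is shared by the genuine end-of-stream path and every error
// path: it drops any unread output buffers, marks the output EOS with the
// given error code, wakes a reader blocked in read(), and, if a stop() is in
// flight, replies to all queued stop requests.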
void MediaCodecSource::signalEOS(status_t err) {
    bool reachedEOS = false;
    {
        Mutexed<Output>::Locked output(mOutput);
        reachedEOS = output->mEncoderReachedEOS;
        if (!reachedEOS) {
            ALOGV("encoder (%s) reached EOS", mIsVideo ? "video" : "audio");
            // release all unread media buffers
            for (List<MediaBufferBase*>::iterator it = output->mBufferQueue.begin();
                    it != output->mBufferQueue.end(); it++) {
                (*it)->release();
            }
            output->mBufferQueue.clear();
            output->mEncoderReachedEOS = true;
            output->mErrorCode = err;
            if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
                mStopping = true;
                mPuller->stop();
            }
            output->mCond.signal();

            reachedEOS = true;
            output.unlock();
            releaseEncoder();
        }
    }

    if (mStopping && reachedEOS) {
        ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
        if (mPuller != NULL) {
            mPuller->interruptSource();
        }
        ALOGI("source (%s) stopped", mIsVideo ? "video" : "audio");
        // posting reply to everyone that's waiting
        List<sp<AReplyToken>>::iterator it;
        for (it = mStopReplyIDQueue.begin();
                it != mStopReplyIDQueue.end(); it++) {
            (new AMessage)->postReply(*it);
        }
        mStopReplyIDQueue.clear();
        mStopping = false;
        ++mGeneration;
    }
}

void MediaCodecSource::resume(int64_t resumeStartTimeUs) {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    if (mEncoder != NULL) {
        sp<AMessage> params = new AMessage;
        params->setInt32(PARAMETER_KEY_SUSPEND, false);
        if (resumeStartTimeUs > 0) {
            params->setInt64(PARAMETER_KEY_SUSPEND_TIME, resumeStartTimeUs);
        }
        mEncoder->setParameters(params);
    }
}

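// Drains the Puller's queue into whatever encoder input buffers are currently
// free, copying the data and carrying the timestamp across. A NULL buffer
// from the puller marks end of stream and is queued with BUFFER_FLAG_EOS.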
status_t MediaCodecSource::feedEncoderInputBuffers() {
    MediaBufferBase* mbuf = NULL;
    while (!mAvailEncoderInputIndices.empty() && mPuller->readBuffer(&mbuf)) {
        if (!mEncoder) {
            return BAD_VALUE;
        }
        size_t bufferIndex = *mAvailEncoderInputIndices.begin();
        mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());

        int64_t timeUs = 0LL;
        uint32_t flags = 0;
        size_t size = 0;

        if (mbuf != NULL) {
            CHECK(mbuf->meta_data().findInt64(kKeyTime, &timeUs));
            if (mFirstSampleSystemTimeUs < 0LL) {
                mFirstSampleSystemTimeUs = systemTime() / 1000;
                if (mPausePending) {
                    mPausePending = false;
                    onPause(mFirstSampleSystemTimeUs);
                    mbuf->release();
                    mAvailEncoderInputIndices.push_back(bufferIndex);
                    return OK;
                }
            }

            timeUs += mInputBufferTimeOffsetUs;

            // push decoding time for video, or drift time for audio
            if (mIsVideo) {
                mDecodingTimeQueue.push_back(timeUs);
            } else {
#if DEBUG_DRIFT_TIME
                if (mFirstSampleTimeUs < 0LL) {
                    mFirstSampleTimeUs = timeUs;
                }
                int64_t driftTimeUs = 0;
                if (mbuf->meta_data().findInt64(kKeyDriftTime, &driftTimeUs)
                        && driftTimeUs) {
                    driftTimeUs = timeUs - mFirstSampleTimeUs - driftTimeUs;
                }
                mDriftTimeQueue.push_back(driftTimeUs);
#endif // DEBUG_DRIFT_TIME
            }

            sp<MediaCodecBuffer> inbuf;
            status_t err = mEncoder->getInputBuffer(bufferIndex, &inbuf);

            if (err != OK || inbuf == NULL || inbuf->data() == NULL
                    || mbuf->data() == NULL || mbuf->size() == 0) {
                mbuf->release();
                signalEOS();
                break;
            }

            size = mbuf->size();

            memcpy(inbuf->data(), mbuf->data(), size);

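            // For video, forward any per-buffer dataspace change to the codec
            // as color aspects so the bitstream is tagged consistently.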
            if (mIsVideo) {
                int32_t ds = 0;
                if (mbuf->meta_data().findInt32(kKeyColorSpace, &ds)
                        && ds != HAL_DATASPACE_UNKNOWN) {
                    android_dataspace dataspace = static_cast<android_dataspace>(ds);
                    ColorUtils::convertDataSpaceToV0(dataspace);
                    ALOGD("Updating dataspace to %x", dataspace);
                    int32_t standard, transfer, range;
                    ColorUtils::getColorConfigFromDataSpace(
                            dataspace, &range, &standard, &transfer);
                    sp<AMessage> msg = new AMessage;
                    msg->setInt32(KEY_COLOR_STANDARD, standard);
                    msg->setInt32(KEY_COLOR_TRANSFER, transfer);
                    msg->setInt32(KEY_COLOR_RANGE, range);
                    msg->setInt32("android._dataspace", dataspace);
                    mEncoder->setParameters(msg);
                }

                // video encoder will release MediaBuffer when done
                // with underlying data.
                inbuf->meta()->setObject("mediaBufferHolder", new MediaBufferHolder(mbuf));
                mbuf->release();
            } else {
                mbuf->release();
            }
        } else {
            flags = MediaCodec::BUFFER_FLAG_EOS;
        }

        status_t err = mEncoder->queueInputBuffer(
                bufferIndex, 0, size, timeUs, flags);

        if (err != OK) {
            return err;
        }
    }

    return OK;
}

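// onStart() doubles as resume: if already started, it clears a pending pause
// (or unsuspends the input surface / puller) and, for video, requests an IDR
// frame so the resumed stream starts on a sync frame.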
status_t MediaCodecSource::onStart(MetaData *params) {
    if (mStopping || mOutput.lock()->mEncoderReachedEOS) {
        ALOGE("Failed to start while we're stopping or encoder already stopped due to EOS error");
        return INVALID_OPERATION;
    }
    int64_t startTimeUs;
    if (params == NULL || !params->findInt64(kKeyTime, &startTimeUs)) {
        startTimeUs = -1LL;
    }

    if (mStarted) {
        ALOGI("MediaCodecSource (%s) resuming", mIsVideo ? "video" : "audio");
        if (mPausePending) {
            mPausePending = false;
            return OK;
        }
        if (mIsVideo) {
            mEncoder->requestIDRFrame();
        }
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            resume(startTimeUs);
        } else {
            CHECK(mPuller != NULL);
            mPuller->resume();
        }
        return OK;
    }

    ALOGI("MediaCodecSource (%s) starting", mIsVideo ? "video" : "audio");

    status_t err = OK;

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        if (mEncoder != NULL) {
            sp<AMessage> params = new AMessage;
            params->setInt32(PARAMETER_KEY_SUSPEND, false);
            if (startTimeUs >= 0) {
                params->setInt64("skip-frames-before", startTimeUs);
            }
            mEncoder->setParameters(params);
        }
    } else {
        CHECK(mPuller != NULL);
        sp<MetaData> meta = params;
        if (mSetEncoderFormat) {
            if (meta == NULL) {
                meta = new MetaData;
            }
            meta->setInt32(kKeyPixelFormat, mEncoderFormat);
            meta->setInt32(kKeyColorSpace, mEncoderDataSpace);
        }

        sp<AMessage> notify = new AMessage(kWhatPullerNotify, mReflector);
        err = mPuller->start(meta.get(), notify);
        if (err != OK) {
            return err;
        }
    }

    ALOGI("MediaCodecSource (%s) started", mIsVideo ? "video" : "audio");

    mStarted = true;
    return OK;
}

void MediaCodecSource::onPause(int64_t pauseStartTimeUs) {
    if (mStopping || mOutput.lock()->mEncoderReachedEOS) {
        // Nothing to do
    } else if ((mFlags & FLAG_USE_SURFACE_INPUT) && (mEncoder != NULL)) {
        sp<AMessage> params = new AMessage;
        params->setInt32(PARAMETER_KEY_SUSPEND, true);
        params->setInt64(PARAMETER_KEY_SUSPEND_TIME, pauseStartTimeUs);
        mEncoder->setParameters(params);
    } else {
        CHECK(mPuller != NULL);
        mPuller->pause();
    }
}

void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatPullerNotify:
        {
            int32_t eos = 0;
            if (msg->findInt32("eos", &eos) && eos) {
                ALOGI("puller (%s) reached EOS", mIsVideo ? "video" : "audio");
                signalEOS();
                break;
            }

            if (mEncoder == NULL) {
                ALOGV("got msg '%s' after encoder shutdown.", msg->debugString().c_str());
                break;
            }

            feedEncoderInputBuffers();
            break;
        }
        case kWhatEncoderActivity:
        {
            if (mEncoder == NULL) {
                break;
            }

            int32_t cbID;
            CHECK(msg->findInt32("callbackID", &cbID));
            if (cbID == MediaCodec::CB_INPUT_AVAILABLE) {
                int32_t index;
                CHECK(msg->findInt32("index", &index));

                mAvailEncoderInputIndices.push_back(index);
                feedEncoderInputBuffers();
            } else if (cbID == MediaCodec::CB_OUTPUT_FORMAT_CHANGED) {
                status_t err = mEncoder->getOutputFormat(&mOutputFormat);
                if (err != OK) {
                    signalEOS(err);
                    break;
                }
                sp<MetaData> meta = new MetaData;
                convertMessageToMetaData(mOutputFormat, meta);
                mMeta.lock().set(meta);
            } else if (cbID == MediaCodec::CB_OUTPUT_AVAILABLE) {
                int32_t index;
                size_t offset;
                size_t size;
                int64_t timeUs;
                int32_t flags;

                CHECK(msg->findInt32("index", &index));
                CHECK(msg->findSize("offset", &offset));
                CHECK(msg->findSize("size", &size));
                CHECK(msg->findInt64("timeUs", &timeUs));
                CHECK(msg->findInt32("flags", &flags));

                if (flags & MediaCodec::BUFFER_FLAG_EOS) {
                    mEncoder->releaseOutputBuffer(index);
                    signalEOS();
                    break;
                }

                sp<MediaCodecBuffer> outbuf;
                status_t err = mEncoder->getOutputBuffer(index, &outbuf);
                if (err != OK || outbuf == NULL || outbuf->data() == NULL
                        || outbuf->size() == 0) {
                    signalEOS();
                    break;
                }

                MediaBufferBase *mbuf = new MediaBuffer(outbuf->size());
                mbuf->setObserver(this);
                mbuf->add_ref();

                if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
                    if (mIsVideo) {
                        int64_t decodingTimeUs;
                        if (mFlags & FLAG_USE_SURFACE_INPUT) {
                            if (mFirstSampleSystemTimeUs < 0LL) {
                                mFirstSampleSystemTimeUs = systemTime() / 1000;
                                if (mPausePending) {
                                    mPausePending = false;
                                    onPause(mFirstSampleSystemTimeUs);
                                    mbuf->release();
                                    break;
                                }
                            }
                            // Timestamp offset is already adjusted in GraphicBufferSource.
                            // GraphicBufferSource is supposed to discard samples
                            // queued before start, and offset timeUs by start time
                            CHECK_GE(timeUs, 0LL);
                            // TODO:
                            // Decoding time for surface source is unavailable,
                            // use presentation time for now. May need to move
                            // this logic into MediaCodec.
                            decodingTimeUs = timeUs;
                        } else {
                            CHECK(!mDecodingTimeQueue.empty());
                            decodingTimeUs = *(mDecodingTimeQueue.begin());
                            mDecodingTimeQueue.erase(mDecodingTimeQueue.begin());
                        }
                        mbuf->meta_data().setInt64(kKeyDecodingTime, decodingTimeUs);

                        ALOGV("[video] time %" PRId64 " us (%.2f secs), dts/pts diff %" PRId64,
                                timeUs, timeUs / 1E6, decodingTimeUs - timeUs);
                    } else {
                        int64_t driftTimeUs = 0;
#if DEBUG_DRIFT_TIME
                        CHECK(!mDriftTimeQueue.empty());
                        driftTimeUs = *(mDriftTimeQueue.begin());
                        mDriftTimeQueue.erase(mDriftTimeQueue.begin());
                        mbuf->meta_data().setInt64(kKeyDriftTime, driftTimeUs);
#endif // DEBUG_DRIFT_TIME
                        ALOGV("[audio] time %" PRId64 " us (%.2f secs), drift %" PRId64,
                                timeUs, timeUs / 1E6, driftTimeUs);
                    }
                    mbuf->meta_data().setInt64(kKeyTime, timeUs);
                } else {
                    mbuf->meta_data().setInt64(kKeyTime, 0LL);
                    mbuf->meta_data().setInt32(kKeyIsCodecConfig, true);
                }
                if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
                    mbuf->meta_data().setInt32(kKeyIsSyncFrame, true);
                }
                memcpy(mbuf->data(), outbuf->data(), outbuf->size());

                {
                    Mutexed<Output>::Locked output(mOutput);
                    output->mBufferQueue.push_back(mbuf);
                    output->mCond.signal();
                }

                mEncoder->releaseOutputBuffer(index);
            } else if (cbID == MediaCodec::CB_ERROR) {
                status_t err;
                CHECK(msg->findInt32("err", &err));
                ALOGE("Encoder (%s) reported error : 0x%x",
                        mIsVideo ? "video" : "audio", err);
                if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
                    mStopping = true;
                    mPuller->stop();
                }
                signalEOS();
            }
            break;
        }
        case kWhatStart:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            sp<RefBase> obj;
            CHECK(msg->findObject("meta", &obj));
            MetaData *params = static_cast<MetaData *>(obj.get());

            sp<AMessage> response = new AMessage;
            response->setInt32("err", onStart(params));
            response->postReply(replyID);
            break;
        }
        case kWhatStop:
        {
            ALOGI("encoder (%s) stopping", mIsVideo ? "video" : "audio");

            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            if (mOutput.lock()->mEncoderReachedEOS) {
                // if we already reached EOS, reply and return now
                ALOGI("encoder (%s) already stopped",
                        mIsVideo ? "video" : "audio");
                (new AMessage)->postReply(replyID);
                break;
            }

            mStopReplyIDQueue.push_back(replyID);
            if (mStopping) {
                // nothing to do if we're already stopping, reply will be posted
                // to all when we're stopped.
                break;
            }

            mStopping = true;

            int64_t timeoutUs = kStopTimeoutUs;
            // if using surface, signal source EOS and wait for EOS to come back.
            // otherwise, stop puller (which also clears the input buffer queue)
            // and wait for the EOS message. We cannot call source->stop() because
            // the encoder may still be processing input buffers.
            if (mFlags & FLAG_USE_SURFACE_INPUT) {
                mEncoder->signalEndOfInputStream();
                // Increase the timeout if there is delay in the GraphicBufferSource
                sp<AMessage> inputFormat;
                int64_t stopTimeOffsetUs;
                if (mEncoder->getInputFormat(&inputFormat) == OK &&
                        inputFormat->findInt64("android._stop-time-offset-us", &stopTimeOffsetUs) &&
                        stopTimeOffsetUs > 0) {
                    if (stopTimeOffsetUs > kMaxStopTimeOffsetUs) {
                        ALOGW("Source stopTimeOffsetUs %lld too large, limit at %lld us",
                                (long long)stopTimeOffsetUs, (long long)kMaxStopTimeOffsetUs);
                        stopTimeOffsetUs = kMaxStopTimeOffsetUs;
                    }
                    timeoutUs += stopTimeOffsetUs;
                } else {
                    // Use kMaxStopTimeOffsetUs if stop time offset is not provided by input source
                    timeoutUs = kMaxStopTimeOffsetUs;
                }
            } else {
                mPuller->stop();
            }

            // complete stop even if encoder/puller stalled
            sp<AMessage> timeoutMsg = new AMessage(kWhatStopStalled, mReflector);
            timeoutMsg->setInt32("generation", mGeneration);
            timeoutMsg->post(timeoutUs);
            break;
        }

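        // kWhatStopStalled fires only if the stop above did not complete
        // within its timeout; the generation check discards stale timeout
        // messages from stops that already finished (mGeneration is bumped
        // in signalEOS once a stop completes).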
        case kWhatStopStalled:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mGeneration) {
                break;
            }
            ALOGD("source (%s) stopping stalled", mIsVideo ? "video" : "audio");
            signalEOS();
            break;
        }

        case kWhatPause:
        {
            if (mFirstSampleSystemTimeUs < 0) {
                mPausePending = true;
            } else {
                sp<RefBase> obj;
                CHECK(msg->findObject("meta", &obj));
                MetaData *params = static_cast<MetaData *>(obj.get());
                int64_t pauseStartTimeUs = -1;
                if (params == NULL || !params->findInt64(kKeyTime, &pauseStartTimeUs)) {
                    pauseStartTimeUs = -1LL;
                }
                onPause(pauseStartTimeUs);
            }
            break;
        }
        case kWhatSetInputBufferTimeOffset:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            status_t err = OK;
            CHECK(msg->findInt64(PARAMETER_KEY_OFFSET_TIME, &mInputBufferTimeOffsetUs));

            // Propagate the timestamp offset to GraphicBufferSource.
            if (mFlags & FLAG_USE_SURFACE_INPUT) {
                sp<AMessage> params = new AMessage;
                params->setInt64(PARAMETER_KEY_OFFSET_TIME, mInputBufferTimeOffsetUs);
                err = mEncoder ? mEncoder->setParameters(params) : BAD_VALUE;
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }
        case kWhatSetStopTimeUs:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            status_t err = OK;
            int64_t stopTimeUs;
            CHECK(msg->findInt64("stop-time-us", &stopTimeUs));

            // Propagate the stop time to GraphicBufferSource.
            if (mFlags & FLAG_USE_SURFACE_INPUT) {
                sp<AMessage> params = new AMessage;
                params->setInt64("stop-time-us", stopTimeUs);
                err = mEncoder ? mEncoder->setParameters(params) : BAD_VALUE;
            } else {
                err = mPuller->setStopTimeUs(stopTimeUs);
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }
        case kWhatGetFirstSampleSystemTimeUs:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            sp<AMessage> response = new AMessage;
            response->setInt64("time-us", mFirstSampleSystemTimeUs);
            response->postReply(replyID);
            break;
        }
        default:
            TRESPASS();
    }
}

}  // namespace android