/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerRenderer"
#include <utils/Log.h>

#include "NuPlayerRenderer.h"

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>

namespace android {

// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mVideoRenderingStarted(false),
      mLastPositionUpdateUs(-1ll),
      mVideoLateByUs(0ll) {
}

NuPlayer::Renderer::~Renderer() {
}

void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setBuffer("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

void NuPlayer::Renderer::flush(bool audio) {
    {
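        // The flushing flags are also read in dropBufferWhileFlushing() on the
        // renderer's looper thread, while flush() runs on the caller's thread,
        // hence the lock.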
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            CHECK(!mFlushingAudio);
            mFlushingAudio = true;
        } else {
            CHECK(!mFlushingVideo);
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

void NuPlayer::Renderer::signalTimeDiscontinuity() {
    CHECK(mAudioQueue.empty());
    CHECK(mVideoQueue.empty());
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
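    // Re-align the first audio and video buffers only when both streams are
    // present; with a single stream there is nothing to sync against.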
    mSyncQueues = mHasAudio && mHasVideo;
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDrainAudioQueue:
        {
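            // Drain messages posted before a flush or pause carry a stale
            // generation number and are simply ignored.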
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;
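                // (msecsPerFrame() is in milliseconds, so multiplying by
                // 1000 yields microseconds.)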

                // Let's give it more data after about half that time
                // has elapsed.
                postDrainAudioQueue(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

void NuPlayer::Renderer::postDrainAudioQueue(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

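    // Copy as much of the queue as the sink can accept without blocking.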
    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS

            notifyEOS(true /* audio */, entry->mFinalResult);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);

            mAnchorTimeMediaUs = mediaTimeUs;

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            uint32_t numFramesPendingPlayout =
                mNumFramesWritten - numFramesPlayed;

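            // Estimate how far in the future the first sample of this buffer
            // will actually be heard: frames still pending in the sink plus
            // (roughly) half the sink's reported latency.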
            int64_t realTimeOffsetUs =
                (mAudioSink->latency() / 2  /* XXX */
                    + numFramesPendingPlayout
                        * mAudioSink->msecsPerFrame()) * 1000ll;

            // ALOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);

            mAnchorTimeRealUs =
                ALooper::GetNowUs() + realTimeOffsetUs;
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        CHECK_EQ(mAudioSink->write(
                    entry->mBuffer->data() + entry->mOffset, copy),
                 (ssize_t)copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= copy;
        size_t copiedFrames = copy / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;
    }

    notifyPosition();

    return !mAudioQueue.empty();
}

void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    int64_t delayUs;

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else if (mFlags & FLAG_REAL_TIME) {
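        // In real-time mode the buffer timestamp is already a wall-clock
        // time, so it can be compared against "now" directly.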
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        delayUs = mediaTimeUs - ALooper::GetNowUs();
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

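        // No anchor yet means nothing has established the media clock (e.g.
        // no audio has been written). Render right away and, if there is no
        // audio at all, let this frame anchor the clock.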
        if (mAnchorTimeMediaUs < 0) {
            delayUs = 0;

            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
            }
        } else {
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;

            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }

    msg->post(delayUs);

    mDrainVideoQueuePending = true;
}

void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        mVideoLateByUs = 0ll;

        notifyPosition();
        return;
    }

    // Declared outside the branch below so the ALOGV further down can still
    // refer to it; in real-time mode it simply stays at -1.
    int64_t mediaTimeUs = -1;
    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
    } else {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    }

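    // A frame more than 40 ms behind schedule is considered too late to be
    // worth displaying; the consumer is told to drop it via the "render" flag.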
    mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
    bool tooLate = (mVideoLateByUs > 40000);

    if (tooLate) {
        ALOGV("video late by %lld us (%.2f secs)",
             mVideoLateByUs, mVideoLateByUs / 1E6);
    } else {
        ALOGV("rendering video at media time %.2f secs", mediaTimeUs / 1E6);
    }

    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    if (!mVideoRenderingStarted) {
        mVideoRenderingStarted = true;
        notifyVideoRenderingStart();
    }

    notifyPosition();
}

void NuPlayer::Renderer::notifyVideoRenderingStart() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatVideoRenderingStart);
    notify->post();
}

void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->setInt32("finalResult", finalResult);
    notify->post();
}

void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

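    // While mSyncQueues is set, wait until both queues have a buffer, drop
    // audio that starts more than 0.1 secs before the first video frame, and
    // only then let both queues drain.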
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts more than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone();
}

void NuPlayer::Renderer::syncQueuesDone() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        if (mAudioQueue.empty() && mSyncQueues) {
            syncQueuesDone();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        if (mVideoQueue.empty() && mSyncQueues) {
            syncQueuesDone();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}

void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    syncQueuesDone();

    if (audio) {
        flushQueue(&mAudioQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingAudio = false;

        mDrainAudioQueuePending = false;
        ++mAudioQueueGeneration;
    } else {
        flushQueue(&mVideoQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingVideo = false;

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;
    }

    notifyFlushComplete(audio);
}

void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

void NuPlayer::Renderer::onAudioSinkChanged() {
    CHECK(!mDrainAudioQueuePending);
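    // Resync our notion of how many frames have been written with whatever
    // the (new) sink reports; fall back to zero if it cannot tell us.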
    mNumFramesWritten = 0;
    uint32_t written;
    if (mAudioSink->getFramesWritten(&written) == OK) {
        mNumFramesWritten = written;
    }
}

void NuPlayer::Renderer::notifyPosition() {
    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
        return;
    }

    int64_t nowUs = ALooper::GetNowUs();

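    // Throttle updates to at most one every kMinPositionUpdateDelayUs.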
    if (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}

void NuPlayer::Renderer::onPause() {
    CHECK(!mPaused);

    mDrainAudioQueuePending = false;
    ++mAudioQueueGeneration;

    mDrainVideoQueuePending = false;
    ++mVideoQueueGeneration;

    if (mHasAudio) {
        mAudioSink->pause();
    }

    ALOGV("now paused audio queue has %d entries, video has %d entries",
          mAudioQueue.size(), mVideoQueue.size());

    mPaused = true;
}

void NuPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        mAudioSink->start();
    }

    mPaused = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

}  // namespace android