1 /*
2  * Copyright (C) 2010 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "NuPlayerRenderer"
19 #include <utils/Log.h>
20 
21 #include "AWakeLock.h"
22 #include "NuPlayerRenderer.h"
23 #include <algorithm>
24 #include <cutils/properties.h>
25 #include <media/stagefright/foundation/ADebug.h>
26 #include <media/stagefright/foundation/AMessage.h>
27 #include <media/stagefright/foundation/AUtils.h>
28 #include <media/stagefright/MediaClock.h>
29 #include <media/stagefright/MediaCodecConstants.h>
30 #include <media/stagefright/MediaDefs.h>
31 #include <media/stagefright/MediaErrors.h>
32 #include <media/stagefright/MetaData.h>
33 #include <media/stagefright/Utils.h>
34 #include <media/stagefright/VideoFrameScheduler.h>
35 #include <media/MediaCodecBuffer.h>
36 #include <utils/SystemClock.h>
37 
38 #include <inttypes.h>
39 
40 namespace android {
41 
42 /*
43  * Example of common configuration settings in shell script form
44 
45    #Turn offload audio off (use PCM for Play Music) -- AudioPolicyManager
46    adb shell setprop audio.offload.disable 1
47 
48    #Allow offload audio with video (requires offloading to be enabled) -- AudioPolicyManager
49    adb shell setprop audio.offload.video 1
50 
51    #Use audio callbacks for PCM data
52    adb shell setprop media.stagefright.audio.cbk 1
53 
54    #Use deep buffer for PCM data with video (it is generally enabled for audio-only)
55    adb shell setprop media.stagefright.audio.deep 1
56 
57    #Set size of buffers for pcm audio sink in msec (example: 1000 msec)
58    adb shell setprop media.stagefright.audio.sink 1000
59 
60  * These configurations take effect for the next track played (not the current track).
61  */
62 
63 static inline bool getUseAudioCallbackSetting() {
64     return property_get_bool("media.stagefright.audio.cbk", false /* default_value */);
65 }
66 
67 static inline int32_t getAudioSinkPcmMsSetting() {
68     return property_get_int32(
69             "media.stagefright.audio.sink", 500 /* default_value */);
70 }
71 
72 // Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
73 // is closed to allow the audio DSP to power down.
74 static const int64_t kOffloadPauseMaxUs = 10000000LL;
75 
76 // Maximum allowed delay from AudioSink, 1.5 seconds.
77 static const int64_t kMaxAllowedAudioSinkDelayUs = 1500000LL;
78 
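// Minimum interval between media clock anchor updates driven by new audio media times
// (see onNewAudioMediaTime()).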
79 static const int64_t kMinimumAudioClockUpdatePeriodUs = 20 /* msec */ * 1000;
80 
81 // Default video frame display duration when only video exists.
82 // Used to set max media time in MediaClock.
83 static const int64_t kDefaultVideoFrameIntervalUs = 100000LL;
84 
85 // static
86 const NuPlayer::Renderer::PcmInfo NuPlayer::Renderer::AUDIO_PCMINFO_INITIALIZER = {
87         AUDIO_CHANNEL_NONE,
88         AUDIO_OUTPUT_FLAG_NONE,
89         AUDIO_FORMAT_INVALID,
90         0, // mNumChannels
91         0 // mSampleRate
92 };
93 
94 // static
95 const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;
96 
97 static audio_format_t constexpr audioFormatFromEncoding(int32_t pcmEncoding) {
98     switch (pcmEncoding) {
99     case kAudioEncodingPcmFloat:
100         return AUDIO_FORMAT_PCM_FLOAT;
101     case kAudioEncodingPcm16bit:
102         return AUDIO_FORMAT_PCM_16_BIT;
103     case kAudioEncodingPcm8bit:
104         return AUDIO_FORMAT_PCM_8_BIT; // TODO: do we want to support this?
105     default:
106         ALOGE("%s: Invalid encoding: %d", __func__, pcmEncoding);
107         return AUDIO_FORMAT_INVALID;
108     }
109 }
110 
111 NuPlayer::Renderer::Renderer(
112         const sp<MediaPlayerBase::AudioSink> &sink,
113         const sp<MediaClock> &mediaClock,
114         const sp<AMessage> &notify,
115         uint32_t flags)
116     : mAudioSink(sink),
117       mUseVirtualAudioSink(false),
118       mNotify(notify),
119       mFlags(flags),
120       mNumFramesWritten(0),
121       mDrainAudioQueuePending(false),
122       mDrainVideoQueuePending(false),
123       mAudioQueueGeneration(0),
124       mVideoQueueGeneration(0),
125       mAudioDrainGeneration(0),
126       mVideoDrainGeneration(0),
127       mAudioEOSGeneration(0),
128       mMediaClock(mediaClock),
129       mPlaybackSettings(AUDIO_PLAYBACK_RATE_DEFAULT),
130       mAudioFirstAnchorTimeMediaUs(-1),
131       mAnchorTimeMediaUs(-1),
132       mAnchorNumFramesWritten(-1),
133       mVideoLateByUs(0LL),
134       mNextVideoTimeMediaUs(-1),
135       mHasAudio(false),
136       mHasVideo(false),
137       mNotifyCompleteAudio(false),
138       mNotifyCompleteVideo(false),
139       mSyncQueues(false),
140       mPaused(false),
141       mPauseDrainAudioAllowedUs(0),
142       mVideoSampleReceived(false),
143       mVideoRenderingStarted(false),
144       mVideoRenderingStartGeneration(0),
145       mAudioRenderingStartGeneration(0),
146       mRenderingDataDelivered(false),
147       mNextAudioClockUpdateTimeUs(-1),
148       mLastAudioMediaTimeUs(-1),
149       mAudioOffloadPauseTimeoutGeneration(0),
150       mAudioTornDown(false),
151       mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
152       mCurrentPcmInfo(AUDIO_PCMINFO_INITIALIZER),
153       mTotalBuffersQueued(0),
154       mLastAudioBufferDrained(0),
155       mUseAudioCallback(false),
156       mWakeLock(new AWakeLock()) {
157     CHECK(mediaClock != NULL);
158     mPlaybackRate = mPlaybackSettings.mSpeed;
159     mMediaClock->setPlaybackRate(mPlaybackRate);
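    // Start with mSyncFlag set; flush() clears it when it needs the looper thread to
    // signal a sync point (see the end of onMessageReceived()).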
160     (void)mSyncFlag.test_and_set();
161 }
162 
163 NuPlayer::Renderer::~Renderer() {
164     if (offloadingAudio()) {
165         mAudioSink->stop();
166         mAudioSink->flush();
167         mAudioSink->close();
168     }
169 
170     // Try to avoid a race condition in case the callback is still active.
171     Mutex::Autolock autoLock(mLock);
172     if (mUseAudioCallback) {
173         flushQueue(&mAudioQueue);
174         flushQueue(&mVideoQueue);
175     }
176     mWakeLock.clear();
177     mVideoScheduler.clear();
178     mNotify.clear();
179     mAudioSink.clear();
180 }
181 
182 void NuPlayer::Renderer::queueBuffer(
183         bool audio,
184         const sp<MediaCodecBuffer> &buffer,
185         const sp<AMessage> &notifyConsumed) {
186     sp<AMessage> msg = new AMessage(kWhatQueueBuffer, this);
187     msg->setInt32("queueGeneration", getQueueGeneration(audio));
188     msg->setInt32("audio", static_cast<int32_t>(audio));
189     msg->setObject("buffer", buffer);
190     msg->setMessage("notifyConsumed", notifyConsumed);
191     msg->post();
192 }
193 
194 void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
195     CHECK_NE(finalResult, (status_t)OK);
196 
197     sp<AMessage> msg = new AMessage(kWhatQueueEOS, this);
198     msg->setInt32("queueGeneration", getQueueGeneration(audio));
199     msg->setInt32("audio", static_cast<int32_t>(audio));
200     msg->setInt32("finalResult", finalResult);
201     msg->post();
202 }
203 
204 status_t NuPlayer::Renderer::setPlaybackSettings(const AudioPlaybackRate &rate) {
205     sp<AMessage> msg = new AMessage(kWhatConfigPlayback, this);
206     writeToAMessage(msg, rate);
207     sp<AMessage> response;
208     status_t err = msg->postAndAwaitResponse(&response);
209     if (err == OK && response != NULL) {
210         CHECK(response->findInt32("err", &err));
211     }
212     return err;
213 }
214 
215 status_t NuPlayer::Renderer::onConfigPlayback(const AudioPlaybackRate &rate /* sanitized */) {
216     if (rate.mSpeed == 0.f) {
217         onPause();
218         // Don't call the audio sink's setPlaybackRate() when pausing, as the pitch does
219         // not have to correspond to any non-zero speed (e.g. the old speed). Keep the
220         // settings nonetheless, using the old speed, in case the audio sink changes.
221         AudioPlaybackRate newRate = rate;
222         newRate.mSpeed = mPlaybackSettings.mSpeed;
223         mPlaybackSettings = newRate;
224         return OK;
225     }
226 
227     if (mAudioSink != NULL && mAudioSink->ready()) {
228         status_t err = mAudioSink->setPlaybackRate(rate);
229         if (err != OK) {
230             return err;
231         }
232     }
233     mPlaybackSettings = rate;
234     mPlaybackRate = rate.mSpeed;
235     mMediaClock->setPlaybackRate(mPlaybackRate);
236     return OK;
237 }
238 
239 status_t NuPlayer::Renderer::getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
240     sp<AMessage> msg = new AMessage(kWhatGetPlaybackSettings, this);
241     sp<AMessage> response;
242     status_t err = msg->postAndAwaitResponse(&response);
243     if (err == OK && response != NULL) {
244         CHECK(response->findInt32("err", &err));
245         if (err == OK) {
246             readFromAMessage(response, rate);
247         }
248     }
249     return err;
250 }
251 
252 status_t NuPlayer::Renderer::onGetPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
253     if (mAudioSink != NULL && mAudioSink->ready()) {
254         status_t err = mAudioSink->getPlaybackRate(rate);
255         if (err == OK) {
256             if (!isAudioPlaybackRateEqual(*rate, mPlaybackSettings)) {
257                 ALOGW("correcting mismatch in internal/external playback rate");
258             }
259             // Use the playback settings reported by the audio sink, as they may be
260             // slightly off because the audio sink does not apply very small changes.
261             mPlaybackSettings = *rate;
262             if (mPaused) {
263                 rate->mSpeed = 0.f;
264             }
265         }
266         return err;
267     }
268     *rate = mPlaybackSettings;
269     return OK;
270 }
271 
272 status_t NuPlayer::Renderer::setSyncSettings(const AVSyncSettings &sync, float videoFpsHint) {
273     sp<AMessage> msg = new AMessage(kWhatConfigSync, this);
274     writeToAMessage(msg, sync, videoFpsHint);
275     sp<AMessage> response;
276     status_t err = msg->postAndAwaitResponse(&response);
277     if (err == OK && response != NULL) {
278         CHECK(response->findInt32("err", &err));
279     }
280     return err;
281 }
282 
283 status_t NuPlayer::Renderer::onConfigSync(const AVSyncSettings &sync, float videoFpsHint __unused) {
284     if (sync.mSource != AVSYNC_SOURCE_DEFAULT) {
285         return BAD_VALUE;
286     }
287     // TODO: support sync sources
288     return INVALID_OPERATION;
289 }
290 
291 status_t NuPlayer::Renderer::getSyncSettings(AVSyncSettings *sync, float *videoFps) {
292     sp<AMessage> msg = new AMessage(kWhatGetSyncSettings, this);
293     sp<AMessage> response;
294     status_t err = msg->postAndAwaitResponse(&response);
295     if (err == OK && response != NULL) {
296         CHECK(response->findInt32("err", &err));
297         if (err == OK) {
298             readFromAMessage(response, sync, videoFps);
299         }
300     }
301     return err;
302 }
303 
304 status_t NuPlayer::Renderer::onGetSyncSettings(
305         AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */) {
306     *sync = mSyncSettings;
307     *videoFps = -1.f;
308     return OK;
309 }
310 
311 void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) {
312     {
313         Mutex::Autolock autoLock(mLock);
314         if (audio) {
315             mNotifyCompleteAudio |= notifyComplete;
316             clearAudioFirstAnchorTime_l();
317             ++mAudioQueueGeneration;
318             ++mAudioDrainGeneration;
319         } else {
320             mNotifyCompleteVideo |= notifyComplete;
321             ++mVideoQueueGeneration;
322             ++mVideoDrainGeneration;
323             mNextVideoTimeMediaUs = -1;
324         }
325 
326         mMediaClock->clearAnchor();
327         mVideoLateByUs = 0;
328         mSyncQueues = false;
329     }
330 
331     // Wait until the current job in the message queue is done, to make sure
332     // buffer processing from the old generation is finished. After the current
333     // job is finished, access to buffers is protected by generation.
334     Mutex::Autolock syncLock(mSyncLock);
335     int64_t syncCount = mSyncCount;
336     mSyncFlag.clear();
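    // Once mSyncFlag is cleared, onMessageReceived() bumps mSyncCount and broadcasts
    // mSyncCondition after handling its next message, waking the wait loop below.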
337 
338     // Make sure message queue is not empty after mSyncFlag is cleared.
339     sp<AMessage> msg = new AMessage(kWhatFlush, this);
340     msg->setInt32("audio", static_cast<int32_t>(audio));
341     msg->post();
342 
343     int64_t uptimeMs = uptimeMillis();
344     while (mSyncCount == syncCount) {
345         (void)mSyncCondition.waitRelative(mSyncLock, ms2ns(1000));
346         if (uptimeMillis() - uptimeMs > 1000) {
347             ALOGW("flush(): no wake-up from sync point for 1s; stop waiting to "
348                   "prevent being stuck indefinitely.");
349             break;
350         }
351     }
352 }
353 
354 void NuPlayer::Renderer::signalTimeDiscontinuity() {
355 }
356 
357 void NuPlayer::Renderer::signalDisableOffloadAudio() {
358     (new AMessage(kWhatDisableOffloadAudio, this))->post();
359 }
360 
361 void NuPlayer::Renderer::signalEnableOffloadAudio() {
362     (new AMessage(kWhatEnableOffloadAudio, this))->post();
363 }
364 
365 void NuPlayer::Renderer::pause() {
366     (new AMessage(kWhatPause, this))->post();
367 }
368 
369 void NuPlayer::Renderer::resume() {
370     (new AMessage(kWhatResume, this))->post();
371 }
372 
373 void NuPlayer::Renderer::setVideoFrameRate(float fps) {
374     sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, this);
375     msg->setFloat("frame-rate", fps);
376     msg->post();
377 }
378 
379 // Called on any thread, without mLock acquired.
380 status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
381     status_t result = mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);
382     if (result == OK) {
383         return result;
384     }
385 
386     // MediaClock has not started yet. Try to start it if possible.
387     {
388         Mutex::Autolock autoLock(mLock);
389         if (mAudioFirstAnchorTimeMediaUs == -1) {
390             return result;
391         }
392 
393         AudioTimestamp ts;
394         status_t res = mAudioSink->getTimestamp(ts);
395         if (res != OK) {
396             return result;
397         }
398 
399         // AudioSink has rendered some frames.
400         int64_t nowUs = ALooper::GetNowUs();
401         int64_t playedOutDurationUs = mAudioSink->getPlayedOutDurationUs(nowUs);
402         if (playedOutDurationUs == 0) {
403             *mediaUs = mAudioFirstAnchorTimeMediaUs;
404             return OK;
405         }
406         int64_t nowMediaUs = playedOutDurationUs + mAudioFirstAnchorTimeMediaUs;
407         mMediaClock->updateAnchor(nowMediaUs, nowUs, -1);
408     }
409 
410     return mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);
411 }
412 
413 void NuPlayer::Renderer::clearAudioFirstAnchorTime_l() {
414     mAudioFirstAnchorTimeMediaUs = -1;
415     mMediaClock->setStartingTimeMedia(-1);
416 }
417 
418 void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs) {
419     if (mAudioFirstAnchorTimeMediaUs == -1) {
420         mAudioFirstAnchorTimeMediaUs = mediaUs;
421         mMediaClock->setStartingTimeMedia(mediaUs);
422     }
423 }
424 
425 // Called on renderer looper.
426 void NuPlayer::Renderer::clearAnchorTime() {
427     mMediaClock->clearAnchor();
428     mAnchorTimeMediaUs = -1;
429     mAnchorNumFramesWritten = -1;
430 }
431 
432 void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
433     Mutex::Autolock autoLock(mLock);
434     mVideoLateByUs = lateUs;
435 }
436 
437 int64_t NuPlayer::Renderer::getVideoLateByUs() {
438     Mutex::Autolock autoLock(mLock);
439     return mVideoLateByUs;
440 }
441 
442 status_t NuPlayer::Renderer::openAudioSink(
443         const sp<AMessage> &format,
444         bool offloadOnly,
445         bool hasVideo,
446         uint32_t flags,
447         bool *isOffloaded,
448         bool isStreaming) {
449     sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, this);
450     msg->setMessage("format", format);
451     msg->setInt32("offload-only", offloadOnly);
452     msg->setInt32("has-video", hasVideo);
453     msg->setInt32("flags", flags);
454     msg->setInt32("isStreaming", isStreaming);
455 
456     sp<AMessage> response;
457     status_t postStatus = msg->postAndAwaitResponse(&response);
458 
459     int32_t err;
460     if (postStatus != OK || response.get() == nullptr || !response->findInt32("err", &err)) {
461         err = INVALID_OPERATION;
462     } else if (err == OK && isOffloaded != NULL) {
463         int32_t offload;
464         CHECK(response->findInt32("offload", &offload));
465         *isOffloaded = (offload != 0);
466     }
467     return err;
468 }
469 
470 void NuPlayer::Renderer::closeAudioSink() {
471     sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, this);
472 
473     sp<AMessage> response;
474     msg->postAndAwaitResponse(&response);
475 }
476 
477 void NuPlayer::Renderer::changeAudioFormat(
478         const sp<AMessage> &format,
479         bool offloadOnly,
480         bool hasVideo,
481         uint32_t flags,
482         bool isStreaming,
483         const sp<AMessage> &notify) {
484     sp<AMessage> meta = new AMessage;
485     meta->setMessage("format", format);
486     meta->setInt32("offload-only", offloadOnly);
487     meta->setInt32("has-video", hasVideo);
488     meta->setInt32("flags", flags);
489     meta->setInt32("isStreaming", isStreaming);
490 
491     sp<AMessage> msg = new AMessage(kWhatChangeAudioFormat, this);
492     msg->setInt32("queueGeneration", getQueueGeneration(true /* audio */));
493     msg->setMessage("notify", notify);
494     msg->setMessage("meta", meta);
495     msg->post();
496 }
497 
498 void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
499     switch (msg->what()) {
500         case kWhatOpenAudioSink:
501         {
502             sp<AMessage> format;
503             CHECK(msg->findMessage("format", &format));
504 
505             int32_t offloadOnly;
506             CHECK(msg->findInt32("offload-only", &offloadOnly));
507 
508             int32_t hasVideo;
509             CHECK(msg->findInt32("has-video", &hasVideo));
510 
511             uint32_t flags;
512             CHECK(msg->findInt32("flags", (int32_t *)&flags));
513 
514             uint32_t isStreaming;
515             CHECK(msg->findInt32("isStreaming", (int32_t *)&isStreaming));
516 
517             status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags, isStreaming);
518 
519             sp<AMessage> response = new AMessage;
520             response->setInt32("err", err);
521             response->setInt32("offload", offloadingAudio());
522 
523             sp<AReplyToken> replyID;
524             CHECK(msg->senderAwaitsResponse(&replyID));
525             response->postReply(replyID);
526 
527             break;
528         }
529 
530         case kWhatCloseAudioSink:
531         {
532             sp<AReplyToken> replyID;
533             CHECK(msg->senderAwaitsResponse(&replyID));
534 
535             onCloseAudioSink();
536 
537             sp<AMessage> response = new AMessage;
538             response->postReply(replyID);
539             break;
540         }
541 
542         case kWhatStopAudioSink:
543         {
544             mAudioSink->stop();
545             break;
546         }
547 
548         case kWhatChangeAudioFormat:
549         {
550             int32_t queueGeneration;
551             CHECK(msg->findInt32("queueGeneration", &queueGeneration));
552 
553             sp<AMessage> notify;
554             CHECK(msg->findMessage("notify", &notify));
555 
556             if (offloadingAudio()) {
557                 ALOGW("changeAudioFormat should NOT be called in offload mode");
558                 notify->setInt32("err", INVALID_OPERATION);
559                 notify->post();
560                 break;
561             }
562 
563             sp<AMessage> meta;
564             CHECK(msg->findMessage("meta", &meta));
565 
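            // Apply the format change right away if the request's generation is stale or no
            // audio is queued; otherwise append it to the audio queue so it takes effect only
            // after the already-queued audio has drained (see onDrainAudioQueue()).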
566             if (queueGeneration != getQueueGeneration(true /* audio */)
567                     || mAudioQueue.empty()) {
568                 onChangeAudioFormat(meta, notify);
569                 break;
570             }
571 
572             QueueEntry entry;
573             entry.mNotifyConsumed = notify;
574             entry.mMeta = meta;
575 
576             Mutex::Autolock autoLock(mLock);
577             mAudioQueue.push_back(entry);
578             postDrainAudioQueue_l();
579 
580             break;
581         }
582 
583         case kWhatDrainAudioQueue:
584         {
585             mDrainAudioQueuePending = false;
586 
587             int32_t generation;
588             CHECK(msg->findInt32("drainGeneration", &generation));
589             if (generation != getDrainGeneration(true /* audio */)) {
590                 break;
591             }
592 
593             if (onDrainAudioQueue()) {
594                 uint32_t numFramesPlayed;
595                 CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
596                          (status_t)OK);
597 
598                 // Handle AudioTrack race when start is immediately called after flush.
599                 uint32_t numFramesPendingPlayout =
600                     (mNumFramesWritten > numFramesPlayed ?
601                         mNumFramesWritten - numFramesPlayed : 0);
602 
603                 // This is how long the audio sink will have data to
604                 // play back.
605                 int64_t delayUs =
606                     mAudioSink->msecsPerFrame()
607                         * numFramesPendingPlayout * 1000LL;
608                 if (mPlaybackRate > 1.0f) {
609                     delayUs /= mPlaybackRate;
610                 }
611 
612                 // Let's give it more data after about half that time
613                 // has elapsed.
614                 delayUs /= 2;
615                 // check the buffer size to estimate maximum delay permitted.
616                 const int64_t maxDrainDelayUs = std::max(
617                         mAudioSink->getBufferDurationInUs(), (int64_t)500000 /* half second */);
618                 ALOGD_IF(delayUs > maxDrainDelayUs, "postDrainAudioQueue long delay: %lld > %lld",
619                         (long long)delayUs, (long long)maxDrainDelayUs);
620                 Mutex::Autolock autoLock(mLock);
621                 postDrainAudioQueue_l(delayUs);
622             }
623             break;
624         }
625 
626         case kWhatDrainVideoQueue:
627         {
628             int32_t generation;
629             CHECK(msg->findInt32("drainGeneration", &generation));
630             if (generation != getDrainGeneration(false /* audio */)) {
631                 break;
632             }
633 
634             mDrainVideoQueuePending = false;
635 
636             onDrainVideoQueue();
637 
638             postDrainVideoQueue();
639             break;
640         }
641 
642         case kWhatPostDrainVideoQueue:
643         {
644             int32_t generation;
645             CHECK(msg->findInt32("drainGeneration", &generation));
646             if (generation != getDrainGeneration(false /* audio */)) {
647                 break;
648             }
649 
650             mDrainVideoQueuePending = false;
651             postDrainVideoQueue();
652             break;
653         }
654 
655         case kWhatQueueBuffer:
656         {
657             onQueueBuffer(msg);
658             break;
659         }
660 
661         case kWhatQueueEOS:
662         {
663             onQueueEOS(msg);
664             break;
665         }
666 
667         case kWhatEOS:
668         {
669             int32_t generation;
670             CHECK(msg->findInt32("audioEOSGeneration", &generation));
671             if (generation != mAudioEOSGeneration) {
672                 break;
673             }
674             status_t finalResult;
675             CHECK(msg->findInt32("finalResult", &finalResult));
676             notifyEOS(true /* audio */, finalResult);
677             break;
678         }
679 
680         case kWhatConfigPlayback:
681         {
682             sp<AReplyToken> replyID;
683             CHECK(msg->senderAwaitsResponse(&replyID));
684             AudioPlaybackRate rate;
685             readFromAMessage(msg, &rate);
686             status_t err = onConfigPlayback(rate);
687             sp<AMessage> response = new AMessage;
688             response->setInt32("err", err);
689             response->postReply(replyID);
690             break;
691         }
692 
693         case kWhatGetPlaybackSettings:
694         {
695             sp<AReplyToken> replyID;
696             CHECK(msg->senderAwaitsResponse(&replyID));
697             AudioPlaybackRate rate = AUDIO_PLAYBACK_RATE_DEFAULT;
698             status_t err = onGetPlaybackSettings(&rate);
699             sp<AMessage> response = new AMessage;
700             if (err == OK) {
701                 writeToAMessage(response, rate);
702             }
703             response->setInt32("err", err);
704             response->postReply(replyID);
705             break;
706         }
707 
708         case kWhatConfigSync:
709         {
710             sp<AReplyToken> replyID;
711             CHECK(msg->senderAwaitsResponse(&replyID));
712             AVSyncSettings sync;
713             float videoFpsHint;
714             readFromAMessage(msg, &sync, &videoFpsHint);
715             status_t err = onConfigSync(sync, videoFpsHint);
716             sp<AMessage> response = new AMessage;
717             response->setInt32("err", err);
718             response->postReply(replyID);
719             break;
720         }
721 
722         case kWhatGetSyncSettings:
723         {
724             sp<AReplyToken> replyID;
725             CHECK(msg->senderAwaitsResponse(&replyID));
726 
727             ALOGV("kWhatGetSyncSettings");
728             AVSyncSettings sync;
729             float videoFps = -1.f;
730             status_t err = onGetSyncSettings(&sync, &videoFps);
731             sp<AMessage> response = new AMessage;
732             if (err == OK) {
733                 writeToAMessage(response, sync, videoFps);
734             }
735             response->setInt32("err", err);
736             response->postReply(replyID);
737             break;
738         }
739 
740         case kWhatFlush:
741         {
742             onFlush(msg);
743             break;
744         }
745 
746         case kWhatDisableOffloadAudio:
747         {
748             onDisableOffloadAudio();
749             break;
750         }
751 
752         case kWhatEnableOffloadAudio:
753         {
754             onEnableOffloadAudio();
755             break;
756         }
757 
758         case kWhatPause:
759         {
760             onPause();
761             break;
762         }
763 
764         case kWhatResume:
765         {
766             onResume();
767             break;
768         }
769 
770         case kWhatSetVideoFrameRate:
771         {
772             float fps;
773             CHECK(msg->findFloat("frame-rate", &fps));
774             onSetVideoFrameRate(fps);
775             break;
776         }
777 
778         case kWhatAudioTearDown:
779         {
780             int32_t reason;
781             CHECK(msg->findInt32("reason", &reason));
782 
783             onAudioTearDown((AudioTearDownReason)reason);
784             break;
785         }
786 
787         case kWhatAudioOffloadPauseTimeout:
788         {
789             int32_t generation;
790             CHECK(msg->findInt32("drainGeneration", &generation));
791             if (generation != mAudioOffloadPauseTimeoutGeneration) {
792                 break;
793             }
794             ALOGV("Audio Offload tear down due to pause timeout.");
795             onAudioTearDown(kDueToTimeout);
796             mWakeLock->release();
797             break;
798         }
799 
800         default:
801             TRESPASS();
802             break;
803     }
804     if (!mSyncFlag.test_and_set()) {
805         Mutex::Autolock syncLock(mSyncLock);
806         ++mSyncCount;
807         mSyncCondition.broadcast();
808     }
809 }
810 
811 void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
812     if (mDrainAudioQueuePending || mSyncQueues || mUseAudioCallback) {
813         return;
814     }
815 
816     if (mAudioQueue.empty()) {
817         return;
818     }
819 
820     // FIXME: if paused, wait until AudioTrack stop() is complete before delivering data.
821     if (mPaused) {
822         const int64_t diffUs = mPauseDrainAudioAllowedUs - ALooper::GetNowUs();
823         if (diffUs > delayUs) {
824             delayUs = diffUs;
825         }
826     }
827 
828     mDrainAudioQueuePending = true;
829     sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, this);
830     msg->setInt32("drainGeneration", mAudioDrainGeneration);
831     msg->post(delayUs);
832 }
833 
834 void NuPlayer::Renderer::prepareForMediaRenderingStart_l() {
835     mAudioRenderingStartGeneration = mAudioDrainGeneration;
836     mVideoRenderingStartGeneration = mVideoDrainGeneration;
837     mRenderingDataDelivered = false;
838 }
839 
840 void NuPlayer::Renderer::notifyIfMediaRenderingStarted_l() {
841     if (mVideoRenderingStartGeneration == mVideoDrainGeneration &&
842         mAudioRenderingStartGeneration == mAudioDrainGeneration) {
843         mRenderingDataDelivered = true;
844         if (mPaused) {
845             return;
846         }
847         mVideoRenderingStartGeneration = -1;
848         mAudioRenderingStartGeneration = -1;
849 
850         sp<AMessage> notify = mNotify->dup();
851         notify->setInt32("what", kWhatMediaRenderingStart);
852         notify->post();
853     }
854 }
855 
856 // static
857 size_t NuPlayer::Renderer::AudioSinkCallback(
858         MediaPlayerBase::AudioSink * /* audioSink */,
859         void *buffer,
860         size_t size,
861         void *cookie,
862         MediaPlayerBase::AudioSink::cb_event_t event) {
863     NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;
864 
865     switch (event) {
866         case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
867         {
868             return me->fillAudioBuffer(buffer, size);
869             break;
870         }
871 
872         case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
873         {
874             ALOGV("AudioSink::CB_EVENT_STREAM_END");
875             me->notifyEOSCallback();
876             break;
877         }
878 
879         case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
880         {
881             ALOGV("AudioSink::CB_EVENT_TEAR_DOWN");
882             me->notifyAudioTearDown(kDueToError);
883             break;
884         }
885     }
886 
887     return 0;
888 }
889 
890 void NuPlayer::Renderer::notifyEOSCallback() {
891     Mutex::Autolock autoLock(mLock);
892 
893     if (!mUseAudioCallback) {
894         return;
895     }
896 
897     notifyEOS_l(true /* audio */, ERROR_END_OF_STREAM);
898 }
899 
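// Called on the AudioSink callback thread (CB_EVENT_FILL_BUFFER) to copy queued PCM data
// into "buffer"; returns the number of bytes copied.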
900 size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
901     Mutex::Autolock autoLock(mLock);
902 
903     if (!mUseAudioCallback) {
904         return 0;
905     }
906 
907     bool hasEOS = false;
908 
909     size_t sizeCopied = 0;
910     bool firstEntry = true;
911     QueueEntry *entry;  // will be valid after while loop if hasEOS is set.
912     while (sizeCopied < size && !mAudioQueue.empty()) {
913         entry = &*mAudioQueue.begin();
914 
915         if (entry->mBuffer == NULL) { // EOS
916             hasEOS = true;
917             mAudioQueue.erase(mAudioQueue.begin());
918             break;
919         }
920 
921         if (firstEntry && entry->mOffset == 0) {
922             firstEntry = false;
923             int64_t mediaTimeUs;
924             CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
925             if (mediaTimeUs < 0) {
926                 ALOGD("fillAudioBuffer: reset negative media time %.2f secs to zero",
927                        mediaTimeUs / 1E6);
928                 mediaTimeUs = 0;
929             }
930             ALOGV("fillAudioBuffer: rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
931             setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
932         }
933 
934         size_t copy = entry->mBuffer->size() - entry->mOffset;
935         size_t sizeRemaining = size - sizeCopied;
936         if (copy > sizeRemaining) {
937             copy = sizeRemaining;
938         }
939 
940         memcpy((char *)buffer + sizeCopied,
941                entry->mBuffer->data() + entry->mOffset,
942                copy);
943 
944         entry->mOffset += copy;
945         if (entry->mOffset == entry->mBuffer->size()) {
946             entry->mNotifyConsumed->post();
947             mAudioQueue.erase(mAudioQueue.begin());
948             entry = NULL;
949         }
950         sizeCopied += copy;
951 
952         notifyIfMediaRenderingStarted_l();
953     }
954 
955     if (mAudioFirstAnchorTimeMediaUs >= 0) {
956         int64_t nowUs = ALooper::GetNowUs();
957         int64_t nowMediaUs =
958             mAudioFirstAnchorTimeMediaUs + mAudioSink->getPlayedOutDurationUs(nowUs);
959         // we don't know how much data we are queueing for offloaded tracks.
960         mMediaClock->updateAnchor(nowMediaUs, nowUs, INT64_MAX);
961     }
962 
963     // for non-offloaded audio, we need to compute the frames written because
964     // there is no EVENT_STREAM_END notification. The frames written gives
965     // an estimate on the pending played out duration.
966     if (!offloadingAudio()) {
967         mNumFramesWritten += sizeCopied / mAudioSink->frameSize();
968     }
969 
970     if (hasEOS) {
971         (new AMessage(kWhatStopAudioSink, this))->post();
972         // As there is currently no EVENT_STREAM_END callback notification for
973         // non-offloaded audio tracks, we need to post the EOS ourselves.
974         if (!offloadingAudio()) {
975             int64_t postEOSDelayUs = 0;
976             if (mAudioSink->needsTrailingPadding()) {
977                 postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
978             }
979             ALOGV("fillAudioBuffer: notifyEOS_l "
980                     "mNumFramesWritten:%u  finalResult:%d  postEOSDelay:%lld",
981                     mNumFramesWritten, entry->mFinalResult, (long long)postEOSDelayUs);
982             notifyEOS_l(true /* audio */, entry->mFinalResult, postEOSDelayUs);
983         }
984     }
985     return sizeCopied;
986 }
987 
988 void NuPlayer::Renderer::drainAudioQueueUntilLastEOS() {
989     List<QueueEntry>::iterator it = mAudioQueue.begin(), itEOS = it;
990     bool foundEOS = false;
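    // An entry with neither a buffer nor a notifyConsumed message marks EOS; an entry whose
    // notifyConsumed message carries a nonzero "eos" flag marks a discontinuity to drain up to.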
991     while (it != mAudioQueue.end()) {
992         int32_t eos;
993         QueueEntry *entry = &*it++;
994         if ((entry->mBuffer == nullptr && entry->mNotifyConsumed == nullptr)
995                 || (entry->mNotifyConsumed->findInt32("eos", &eos) && eos != 0)) {
996             itEOS = it;
997             foundEOS = true;
998         }
999     }
1000 
1001     if (foundEOS) {
1002         // post all replies before EOS and drop the samples
1003         for (it = mAudioQueue.begin(); it != itEOS; it++) {
1004             if (it->mBuffer == nullptr) {
1005                 if (it->mNotifyConsumed == nullptr) {
1006                     // delay doesn't matter as we don't even have an AudioTrack
1007                     notifyEOS(true /* audio */, it->mFinalResult);
1008                 } else {
1009                     // TAG for re-opening audio sink.
1010                     onChangeAudioFormat(it->mMeta, it->mNotifyConsumed);
1011                 }
1012             } else {
1013                 it->mNotifyConsumed->post();
1014             }
1015         }
1016         mAudioQueue.erase(mAudioQueue.begin(), itEOS);
1017     }
1018 }
1019 
1020 bool NuPlayer::Renderer::onDrainAudioQueue() {
1021     // do not drain audio during teardown as queued buffers may be invalid.
1022     if (mAudioTornDown) {
1023         return false;
1024     }
1025     // TODO: This call to getPosition checks if AudioTrack has been created
1026     // in AudioSink before draining audio. If AudioTrack doesn't exist, then
1027     // CHECKs on getPosition will fail.
1028     // We still need to figure out why AudioTrack is not created when
1029     // this function is called. One possible reason could be leftover
1030     // audio. Another place to check is whether the decoder has received
1031     // INFO_FORMAT_CHANGED as the first buffer, since the AudioSink is
1032     // opened there, and whether there are interactions with a flush
1033     // immediately after start. Investigate the error message
1034     // "vorbis_dsp_synthesis returned -135", along with RTSP.
1035     uint32_t numFramesPlayed;
1036     if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
1037         // When getPosition fails, renderer will not reschedule the draining
1038         // unless new samples are queued.
1039         // If we have pending EOS (or "eos" marker for discontinuities), we need
1040         // to post these now as NuPlayerDecoder might be waiting for it.
1041         drainAudioQueueUntilLastEOS();
1042 
1043         ALOGW("onDrainAudioQueue(): audio sink is not ready");
1044         return false;
1045     }
1046 
1047 #if 0
1048     ssize_t numFramesAvailableToWrite =
1049         mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);
1050 
1051     if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
1052         ALOGI("audio sink underrun");
1053     } else {
1054         ALOGV("audio queue has %d frames left to play",
1055              mAudioSink->frameCount() - numFramesAvailableToWrite);
1056     }
1057 #endif
1058 
1059     uint32_t prevFramesWritten = mNumFramesWritten;
1060     while (!mAudioQueue.empty()) {
1061         QueueEntry *entry = &*mAudioQueue.begin();
1062 
1063         if (entry->mBuffer == NULL) {
1064             if (entry->mNotifyConsumed != nullptr) {
1065                 // TAG for re-open audio sink.
1066                 onChangeAudioFormat(entry->mMeta, entry->mNotifyConsumed);
1067                 mAudioQueue.erase(mAudioQueue.begin());
1068                 continue;
1069             }
1070 
1071             // EOS
1072             if (mPaused) {
1073                 // Do not notify EOS when paused.
1074                 // This is needed to avoid switching to the next clip while paused.
1075                 ALOGV("onDrainAudioQueue(): Do not notify EOS when paused");
1076                 return false;
1077             }
1078 
1079             int64_t postEOSDelayUs = 0;
1080             if (mAudioSink->needsTrailingPadding()) {
1081                 postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
1082             }
1083             notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);
1084             mLastAudioMediaTimeUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);
1085 
1086             mAudioQueue.erase(mAudioQueue.begin());
1087             entry = NULL;
1088             if (mAudioSink->needsTrailingPadding()) {
1089                 // If we're not in gapless playback (i.e. through setNextPlayer), we
1090                 // need to stop the track here, because that will play out the last
1091                 // little bit at the end of the file. Otherwise short files won't play.
1092                 mAudioSink->stop();
1093                 mNumFramesWritten = 0;
1094             }
1095             return false;
1096         }
1097 
1098         mLastAudioBufferDrained = entry->mBufferOrdinal;
1099 
1100         // ignore 0-sized buffer which could be EOS marker with no data
1101         if (entry->mOffset == 0 && entry->mBuffer->size() > 0) {
1102             int64_t mediaTimeUs;
1103             CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
1104             ALOGV("onDrainAudioQueue: rendering audio at media time %.2f secs",
1105                     mediaTimeUs / 1E6);
1106             onNewAudioMediaTime(mediaTimeUs);
1107         }
1108 
1109         size_t copy = entry->mBuffer->size() - entry->mOffset;
1110 
1111         ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset,
1112                                             copy, false /* blocking */);
1113         if (written < 0) {
1114             // An error in AudioSink write. Perhaps the AudioSink was not properly opened.
1115             if (written == WOULD_BLOCK) {
1116                 ALOGV("AudioSink write would block when writing %zu bytes", copy);
1117             } else {
1118                 ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
1119                 // This can only happen when AudioSink was opened with doNotReconnect flag set to
1120                 // true, in which case the NuPlayer will handle the reconnect.
1121                 notifyAudioTearDown(kDueToError);
1122             }
1123             break;
1124         }
1125 
1126         entry->mOffset += written;
1127         size_t remainder = entry->mBuffer->size() - entry->mOffset;
1128         if ((ssize_t)remainder < mAudioSink->frameSize()) {
1129             if (remainder > 0) {
1130                 ALOGW("Corrupted audio buffer has fractional frames, discarding %zu bytes.",
1131                         remainder);
1132                 entry->mOffset += remainder;
1133                 copy -= remainder;
1134             }
1135 
1136             entry->mNotifyConsumed->post();
1137             mAudioQueue.erase(mAudioQueue.begin());
1138 
1139             entry = NULL;
1140         }
1141 
1142         size_t copiedFrames = written / mAudioSink->frameSize();
1143         mNumFramesWritten += copiedFrames;
1144 
1145         {
1146             Mutex::Autolock autoLock(mLock);
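            // Advance the media clock's max media time to the anchor time plus the duration
            // of the frames written to the sink since the anchor.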
1147             int64_t maxTimeMedia;
1148             maxTimeMedia =
1149                 mAnchorTimeMediaUs +
1150                         (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
1151                                 * 1000LL * mAudioSink->msecsPerFrame());
1152             mMediaClock->updateMaxTimeMedia(maxTimeMedia);
1153 
1154             notifyIfMediaRenderingStarted_l();
1155         }
1156 
1157         if (written != (ssize_t)copy) {
1158             // A short count was received from AudioSink::write()
1159             //
1160             // AudioSink write is called in non-blocking mode.
1161             // It may return with a short count when:
1162             //
1163             // 1) Size to be copied is not a multiple of the frame size. Fractional frames are
1164             //    discarded.
1165             // 2) The data to be copied exceeds the available buffer in AudioSink.
1166             // 3) An error occurs and data has been partially copied to the buffer in AudioSink.
1167             // 4) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.
1168 
1169             // (Case 1)
1170             // Must be a multiple of the frame size. If it is not a multiple of the frame size, it
1171             // needs to fail, as we should not carry over fractional frames between calls.
1172             CHECK_EQ(copy % mAudioSink->frameSize(), 0u);
1173 
1174             // (Case 2, 3, 4)
1175             // Return early to the caller.
1176             // Beware of calling immediately again as this may busy-loop if you are not careful.
1177             ALOGV("AudioSink write short frame count %zd < %zu", written, copy);
1178             break;
1179         }
1180     }
1181 
1182     // calculate whether we need to reschedule another write.
1183     bool reschedule = !mAudioQueue.empty()
1184             && (!mPaused
1185                 || prevFramesWritten != mNumFramesWritten); // permit pause to fill buffers
1186     //ALOGD("reschedule:%d  empty:%d  mPaused:%d  prevFramesWritten:%u  mNumFramesWritten:%u",
1187     //        reschedule, mAudioQueue.empty(), mPaused, prevFramesWritten, mNumFramesWritten);
1188     return reschedule;
1189 }
1190 
1191 int64_t NuPlayer::Renderer::getDurationUsIfPlayedAtSampleRate(uint32_t numFrames) {
1192     int32_t sampleRate = offloadingAudio() ?
1193             mCurrentOffloadInfo.sample_rate : mCurrentPcmInfo.mSampleRate;
1194     if (sampleRate == 0) {
1195         ALOGE("sampleRate is 0 in %s mode", offloadingAudio() ? "offload" : "non-offload");
1196         return 0;
1197     }
1198 
1199     return (int64_t)(numFrames * 1000000LL / sampleRate);
1200 }
1201 
1202 // Calculate duration of pending samples if played at normal rate (i.e., 1.0).
1203 int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
1204     int64_t writtenAudioDurationUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);
1205     if (mUseVirtualAudioSink) {
1206         int64_t nowUs = ALooper::GetNowUs();
1207         int64_t mediaUs;
1208         if (mMediaClock->getMediaTime(nowUs, &mediaUs) != OK) {
1209             return 0LL;
1210         } else {
1211             return writtenAudioDurationUs - (mediaUs - mAudioFirstAnchorTimeMediaUs);
1212         }
1213     }
1214 
1215     const int64_t audioSinkPlayedUs = mAudioSink->getPlayedOutDurationUs(nowUs);
1216     int64_t pendingUs = writtenAudioDurationUs - audioSinkPlayedUs;
1217     if (pendingUs < 0) {
1218         // This shouldn't happen unless the timestamp is stale.
1219         ALOGW("%s: pendingUs %lld < 0, clamping to zero, potential resume after pause "
1220                 "writtenAudioDurationUs: %lld, audioSinkPlayedUs: %lld",
1221                 __func__, (long long)pendingUs,
1222                 (long long)writtenAudioDurationUs, (long long)audioSinkPlayedUs);
1223         pendingUs = 0;
1224     }
1225     return pendingUs;
1226 }
1227 
1228 int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
1229     int64_t realUs;
1230     if (mMediaClock->getRealTimeFor(mediaTimeUs, &realUs) != OK) {
1231         // If failed to get current position, e.g. due to audio clock is
1232         // not ready, then just play out video immediately without delay.
1233         return nowUs;
1234     }
1235     return realUs;
1236 }
1237 
1238 void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
1239     Mutex::Autolock autoLock(mLock);
1240     // TRICKY: vorbis decoder generates multiple frames with the same
1241     // timestamp, so only update on the first frame with a given timestamp
1242     if (mediaTimeUs == mAnchorTimeMediaUs) {
1243         return;
1244     }
1245     setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
1246 
1247     // mNextAudioClockUpdateTimeUs is -1 if we're waiting for audio sink to start
1248     if (mNextAudioClockUpdateTimeUs == -1) {
1249         AudioTimestamp ts;
1250         if (mAudioSink->getTimestamp(ts) == OK && ts.mPosition > 0) {
1251             mNextAudioClockUpdateTimeUs = 0; // start our clock updates
1252         }
1253     }
1254     int64_t nowUs = ALooper::GetNowUs();
1255     if (mNextAudioClockUpdateTimeUs >= 0) {
1256         if (nowUs >= mNextAudioClockUpdateTimeUs) {
1257             int64_t nowMediaUs = mediaTimeUs - getPendingAudioPlayoutDurationUs(nowUs);
1258             mMediaClock->updateAnchor(nowMediaUs, nowUs, mediaTimeUs);
1259             mUseVirtualAudioSink = false;
1260             mNextAudioClockUpdateTimeUs = nowUs + kMinimumAudioClockUpdatePeriodUs;
1261         }
1262     } else {
1263         int64_t unused;
1264         if ((mMediaClock->getMediaTime(nowUs, &unused) != OK)
1265                 && (getDurationUsIfPlayedAtSampleRate(mNumFramesWritten)
1266                         > kMaxAllowedAudioSinkDelayUs)) {
1267             // Enough data has been sent to AudioSink, but AudioSink has not rendered
1268             // any data yet. Something is wrong with AudioSink, e.g., the device is not
1269             // connected to audio out.
1270             // Switch to system clock. This essentially creates a virtual AudioSink with
1271             // initial latency of getDurationUsIfPlayedAtSampleRate(mNumFramesWritten).
1272             // This virtual AudioSink renders audio data starting from the very first sample
1273             // and it's paced by system clock.
1274             ALOGW("AudioSink stuck. ARE YOU CONNECTED TO AUDIO OUT? Switching to system clock.");
1275             mMediaClock->updateAnchor(mAudioFirstAnchorTimeMediaUs, nowUs, mediaTimeUs);
1276             mUseVirtualAudioSink = true;
1277         }
1278     }
1279     mAnchorNumFramesWritten = mNumFramesWritten;
1280     mAnchorTimeMediaUs = mediaTimeUs;
1281 }
1282 
1283 // Called without mLock acquired.
1284 void NuPlayer::Renderer::postDrainVideoQueue() {
1285     if (mDrainVideoQueuePending
1286             || getSyncQueues()
1287             || (mPaused && mVideoSampleReceived)) {
1288         return;
1289     }
1290 
1291     if (mVideoQueue.empty()) {
1292         return;
1293     }
1294 
1295     QueueEntry &entry = *mVideoQueue.begin();
1296 
1297     sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, this);
1298     msg->setInt32("drainGeneration", getDrainGeneration(false /* audio */));
1299 
1300     if (entry.mBuffer == NULL) {
1301         // EOS doesn't carry a timestamp.
1302         msg->post();
1303         mDrainVideoQueuePending = true;
1304         return;
1305     }
1306 
1307     int64_t nowUs = ALooper::GetNowUs();
1308     if (mFlags & FLAG_REAL_TIME) {
1309         int64_t realTimeUs;
1310         CHECK(entry.mBuffer->meta()->findInt64("timeUs", &realTimeUs));
1311 
1312         realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
1313 
1314         int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
1315 
1316         int64_t delayUs = realTimeUs - nowUs;
1317 
1318         ALOGW_IF(delayUs > 500000, "unusually high delayUs: %lld", (long long)delayUs);
1319         // post 2 display refreshes before rendering is due
1320         msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);
1321 
1322         mDrainVideoQueuePending = true;
1323         return;
1324     }
1325 
1326     int64_t mediaTimeUs;
1327     CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
1328 
1329     {
1330         Mutex::Autolock autoLock(mLock);
1331         if (mAnchorTimeMediaUs < 0) {
1332             mMediaClock->updateAnchor(mediaTimeUs, nowUs, mediaTimeUs);
1333             mAnchorTimeMediaUs = mediaTimeUs;
1334         }
1335     }
1336     mNextVideoTimeMediaUs = mediaTimeUs;
1337     if (!mHasAudio) {
1338         // smooth out videos >= 10fps
1339         mMediaClock->updateMaxTimeMedia(mediaTimeUs + kDefaultVideoFrameIntervalUs);
1340     }
1341 
1342     if (!mVideoSampleReceived || mediaTimeUs < mAudioFirstAnchorTimeMediaUs) {
1343         msg->post();
1344     } else {
1345         int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
1346 
1347         // post 2 display refreshes before rendering is due
1348         mMediaClock->addTimer(msg, mediaTimeUs, -twoVsyncsUs);
1349     }
1350 
1351     mDrainVideoQueuePending = true;
1352 }
1353 
1354 void NuPlayer::Renderer::onDrainVideoQueue() {
1355     if (mVideoQueue.empty()) {
1356         return;
1357     }
1358 
1359     QueueEntry *entry = &*mVideoQueue.begin();
1360 
1361     if (entry->mBuffer == NULL) {
1362         // EOS
1363 
1364         notifyEOS(false /* audio */, entry->mFinalResult);
1365 
1366         mVideoQueue.erase(mVideoQueue.begin());
1367         entry = NULL;
1368 
1369         setVideoLateByUs(0);
1370         return;
1371     }
1372 
1373     int64_t nowUs = ALooper::GetNowUs();
1374     int64_t realTimeUs;
1375     int64_t mediaTimeUs = -1;
1376     if (mFlags & FLAG_REAL_TIME) {
1377         CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
1378     } else {
1379         CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
1380 
1381         realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
1382     }
1383     realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
1384 
1385     bool tooLate = false;
1386 
1387     if (!mPaused) {
1388         setVideoLateByUs(nowUs - realTimeUs);
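        // A frame is considered too late if it is more than 40 ms behind its scheduled
        // display time; late frames are still consumed below but not rendered.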
1389         tooLate = (mVideoLateByUs > 40000);
1390 
1391         if (tooLate) {
1392             ALOGV("video late by %lld us (%.2f secs)",
1393                  (long long)mVideoLateByUs, mVideoLateByUs / 1E6);
1394         } else {
1395             int64_t mediaUs = 0;
1396             mMediaClock->getMediaTime(realTimeUs, &mediaUs);
1397             ALOGV("rendering video at media time %.2f secs",
1398                     (mFlags & FLAG_REAL_TIME ? realTimeUs :
1399                     mediaUs) / 1E6);
1400 
1401             if (!(mFlags & FLAG_REAL_TIME)
1402                     && mLastAudioMediaTimeUs != -1
1403                     && mediaTimeUs > mLastAudioMediaTimeUs) {
1404                 // If audio ends before video, video continues to drive media clock.
1405                 // Also smooth out videos >= 10fps.
1406                 mMediaClock->updateMaxTimeMedia(mediaTimeUs + kDefaultVideoFrameIntervalUs);
1407             }
1408         }
1409     } else {
1410         setVideoLateByUs(0);
1411         if (!mVideoSampleReceived && !mHasAudio) {
1412             // This will ensure that the first frame after a flush won't be used as anchor
1413             // when renderer is in paused state, because resume can happen any time after seek.
1414             clearAnchorTime();
1415         }
1416     }
1417 
1418     // Always render the first video frame while keeping stats on A/V sync.
1419     if (!mVideoSampleReceived) {
1420         realTimeUs = nowUs;
1421         tooLate = false;
1422     }
1423 
1424     entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000LL);
1425     entry->mNotifyConsumed->setInt32("render", !tooLate);
1426     entry->mNotifyConsumed->post();
1427     mVideoQueue.erase(mVideoQueue.begin());
1428     entry = NULL;
1429 
1430     mVideoSampleReceived = true;
1431 
1432     if (!mPaused) {
1433         if (!mVideoRenderingStarted) {
1434             mVideoRenderingStarted = true;
1435             notifyVideoRenderingStart();
1436         }
1437         Mutex::Autolock autoLock(mLock);
1438         notifyIfMediaRenderingStarted_l();
1439     }
1440 }
1441 
1442 void NuPlayer::Renderer::notifyVideoRenderingStart() {
1443     sp<AMessage> notify = mNotify->dup();
1444     notify->setInt32("what", kWhatVideoRenderingStart);
1445     notify->post();
1446 }
1447 
1448 void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
1449     Mutex::Autolock autoLock(mLock);
1450     notifyEOS_l(audio, finalResult, delayUs);
1451 }
1452 
1453 void NuPlayer::Renderer::notifyEOS_l(bool audio, status_t finalResult, int64_t delayUs) {
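         // For audio, a positive delay defers the EOS notification until the already-written
         // audio has had time to play out. The message carries mAudioEOSGeneration, which a
         // flush increments, so a stale deferred EOS can be discarded.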
1454     if (audio && delayUs > 0) {
1455         sp<AMessage> msg = new AMessage(kWhatEOS, this);
1456         msg->setInt32("audioEOSGeneration", mAudioEOSGeneration);
1457         msg->setInt32("finalResult", finalResult);
1458         msg->post(delayUs);
1459         return;
1460     }
1461     sp<AMessage> notify = mNotify->dup();
1462     notify->setInt32("what", kWhatEOS);
1463     notify->setInt32("audio", static_cast<int32_t>(audio));
1464     notify->setInt32("finalResult", finalResult);
1465     notify->post(delayUs);
1466 
1467     if (audio) {
1468         // Video might outlive audio. Clear anchor to enable video only case.
1469         mAnchorTimeMediaUs = -1;
1470         mHasAudio = false;
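             // Keep the media clock usable for any remaining video: extend its max media time
             // if it still has a valid anchor, otherwise re-anchor it at the next video frame.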
1471         if (mNextVideoTimeMediaUs >= 0) {
1472             int64_t mediaUs = 0;
1473             int64_t nowUs = ALooper::GetNowUs();
1474             status_t result = mMediaClock->getMediaTime(nowUs, &mediaUs);
1475             if (result == OK) {
1476                 if (mNextVideoTimeMediaUs > mediaUs) {
1477                     mMediaClock->updateMaxTimeMedia(mNextVideoTimeMediaUs);
1478                 }
1479             } else {
1480                 mMediaClock->updateAnchor(
1481                         mNextVideoTimeMediaUs, nowUs,
1482                         mNextVideoTimeMediaUs + kDefaultVideoFrameIntervalUs);
1483             }
1484         }
1485     }
1486 }
1487 
1488 void NuPlayer::Renderer::notifyAudioTearDown(AudioTearDownReason reason) {
1489     sp<AMessage> msg = new AMessage(kWhatAudioTearDown, this);
1490     msg->setInt32("reason", reason);
1491     msg->post();
1492 }
1493 
1494 void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
1495     int32_t audio;
1496     CHECK(msg->findInt32("audio", &audio));
1497 
1498     if (dropBufferIfStale(audio, msg)) {
1499         return;
1500     }
1501 
1502     if (audio) {
1503         mHasAudio = true;
1504     } else {
1505         mHasVideo = true;
1506     }
1507 
1508     if (mHasVideo) {
1509         if (mVideoScheduler == NULL) {
1510             mVideoScheduler = new VideoFrameScheduler();
1511             mVideoScheduler->init();
1512         }
1513     }
1514 
1515     sp<RefBase> obj;
1516     CHECK(msg->findObject("buffer", &obj));
1517     sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
1518 
1519     sp<AMessage> notifyConsumed;
1520     CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));
1521 
1522     QueueEntry entry;
1523     entry.mBuffer = buffer;
1524     entry.mNotifyConsumed = notifyConsumed;
1525     entry.mOffset = 0;
1526     entry.mFinalResult = OK;
1527     entry.mBufferOrdinal = ++mTotalBuffersQueued;
1528 
1529     if (audio) {
1530         Mutex::Autolock autoLock(mLock);
1531         mAudioQueue.push_back(entry);
1532         postDrainAudioQueue_l();
1533     } else {
1534         mVideoQueue.push_back(entry);
1535         postDrainVideoQueue();
1536     }
1537 
1538     Mutex::Autolock autoLock(mLock);
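         // While the queues are still being synced, wait until both have a buffer so the
         // initial audio and video timestamps can be compared below.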
1539     if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
1540         return;
1541     }
1542 
1543     sp<MediaCodecBuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
1544     sp<MediaCodecBuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;
1545 
1546     if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
1547         // EOS signalled on either queue.
1548         syncQueuesDone_l();
1549         return;
1550     }
1551 
1552     int64_t firstAudioTimeUs;
1553     int64_t firstVideoTimeUs;
1554     CHECK(firstAudioBuffer->meta()
1555             ->findInt64("timeUs", &firstAudioTimeUs));
1556     CHECK(firstVideoBuffer->meta()
1557             ->findInt64("timeUs", &firstVideoTimeUs));
1558 
1559     int64_t diff = firstVideoTimeUs - firstAudioTimeUs;
1560 
1561     ALOGV("queueDiff = %.2f secs", diff / 1E6);
1562 
1563     if (diff > 100000LL) {
1564         // Audio data starts more than 0.1 secs before video.
1565         // Drop some audio.
1566 
1567         (*mAudioQueue.begin()).mNotifyConsumed->post();
1568         mAudioQueue.erase(mAudioQueue.begin());
1569         return;
1570     }
1571 
1572     syncQueuesDone_l();
1573 }
1574 
1575 void NuPlayer::Renderer::syncQueuesDone_l() {
1576     if (!mSyncQueues) {
1577         return;
1578     }
1579 
1580     mSyncQueues = false;
1581 
1582     if (!mAudioQueue.empty()) {
1583         postDrainAudioQueue_l();
1584     }
1585 
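         // postDrainVideoQueue() expects to be called without mLock held, so drop the
         // lock around the call.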
1586     if (!mVideoQueue.empty()) {
1587         mLock.unlock();
1588         postDrainVideoQueue();
1589         mLock.lock();
1590     }
1591 }
1592 
1593 void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
1594     int32_t audio;
1595     CHECK(msg->findInt32("audio", &audio));
1596 
1597     if (dropBufferIfStale(audio, msg)) {
1598         return;
1599     }
1600 
1601     int32_t finalResult;
1602     CHECK(msg->findInt32("finalResult", &finalResult));
1603 
1604     QueueEntry entry;
1605     entry.mOffset = 0;
1606     entry.mFinalResult = finalResult;
1607 
1608     if (audio) {
1609         Mutex::Autolock autoLock(mLock);
1610         if (mAudioQueue.empty() && mSyncQueues) {
1611             syncQueuesDone_l();
1612         }
1613         mAudioQueue.push_back(entry);
1614         postDrainAudioQueue_l();
1615     } else {
1616         if (mVideoQueue.empty() && getSyncQueues()) {
1617             Mutex::Autolock autoLock(mLock);
1618             syncQueuesDone_l();
1619         }
1620         mVideoQueue.push_back(entry);
1621         postDrainVideoQueue();
1622     }
1623 }
1624 
1625 void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
1626     int32_t audio, notifyComplete;
1627     CHECK(msg->findInt32("audio", &audio));
1628 
1629     {
1630         Mutex::Autolock autoLock(mLock);
1631         if (audio) {
1632             notifyComplete = mNotifyCompleteAudio;
1633             mNotifyCompleteAudio = false;
1634             mLastAudioMediaTimeUs = -1;
1635 
1636             mHasAudio = false;
1637             if (mNextVideoTimeMediaUs >= 0) {
1638                 int64_t nowUs = ALooper::GetNowUs();
1639                 mMediaClock->updateAnchor(
1640                         mNextVideoTimeMediaUs, nowUs,
1641                         mNextVideoTimeMediaUs + kDefaultVideoFrameIntervalUs);
1642             }
1643         } else {
1644             notifyComplete = mNotifyCompleteVideo;
1645             mNotifyCompleteVideo = false;
1646         }
1647 
1648         // If we're currently syncing the queues, i.e. dropping audio while
1649         // aligning the first audio/video buffer times and only one of the
1650         // two queues has data, we may starve that queue by not requesting
1651         // more buffers from the decoder. If the other source then encounters
1652         // a discontinuity that leads to flushing, we'll never find the
1653         // corresponding discontinuity on the other queue.
1654         // Therefore we'll stop syncing the queues if at least one of them
1655         // is flushed.
1656         syncQueuesDone_l();
1657     }
1658     clearAnchorTime();
1659 
1660     ALOGV("flushing %s", audio ? "audio" : "video");
1661     if (audio) {
1662         {
1663             Mutex::Autolock autoLock(mLock);
1664             flushQueue(&mAudioQueue);
1665 
1666             ++mAudioDrainGeneration;
1667             ++mAudioEOSGeneration;
1668             prepareForMediaRenderingStart_l();
1669 
1670             // the frame count will be reset after flush.
1671             clearAudioFirstAnchorTime_l();
1672         }
1673 
1674         mDrainAudioQueuePending = false;
1675 
1676         if (offloadingAudio()) {
1677             mAudioSink->pause();
1678             mAudioSink->flush();
1679             if (!mPaused) {
1680                 mAudioSink->start();
1681             }
1682         } else {
1683             mAudioSink->pause();
1684             mAudioSink->flush();
1685             // Call stop() to signal to the AudioSink to completely fill the
1686             // internal buffer before resuming playback.
1687             // FIXME: this is ignored after flush().
1688             mAudioSink->stop();
1689             if (!mPaused) {
1690                 mAudioSink->start();
1691             }
1692             mNumFramesWritten = 0;
1693         }
1694         mNextAudioClockUpdateTimeUs = -1;
1695     } else {
1696         flushQueue(&mVideoQueue);
1697 
1698         mDrainVideoQueuePending = false;
1699 
1700         if (mVideoScheduler != NULL) {
1701             mVideoScheduler->restart();
1702         }
1703 
1704         Mutex::Autolock autoLock(mLock);
1705         ++mVideoDrainGeneration;
1706         prepareForMediaRenderingStart_l();
1707     }
1708 
1709     mVideoSampleReceived = false;
1710 
1711     if (notifyComplete) {
1712         notifyFlushComplete(audio);
1713     }
1714 }
1715 
1716 void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
1717     while (!queue->empty()) {
1718         QueueEntry *entry = &*queue->begin();
1719 
1720         if (entry->mBuffer != NULL) {
1721             entry->mNotifyConsumed->post();
1722         } else if (entry->mNotifyConsumed != nullptr) {
1723             // Is it necessary to open the audio sink now?
1724             onChangeAudioFormat(entry->mMeta, entry->mNotifyConsumed);
1725         }
1726 
1727         queue->erase(queue->begin());
1728         entry = NULL;
1729     }
1730 }
1731 
1732 void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
1733     sp<AMessage> notify = mNotify->dup();
1734     notify->setInt32("what", kWhatFlushComplete);
1735     notify->setInt32("audio", static_cast<int32_t>(audio));
1736     notify->post();
1737 }
1738 
1739 bool NuPlayer::Renderer::dropBufferIfStale(
1740         bool audio, const sp<AMessage> &msg) {
1741     int32_t queueGeneration;
1742     CHECK(msg->findInt32("queueGeneration", &queueGeneration));
1743 
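         // Buffers queued before the most recent flush carry an outdated queueGeneration;
         // notify the consumer and drop them instead of processing them.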
1744     if (queueGeneration == getQueueGeneration(audio)) {
1745         return false;
1746     }
1747 
1748     sp<AMessage> notifyConsumed;
1749     if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
1750         notifyConsumed->post();
1751     }
1752 
1753     return true;
1754 }
1755 
1756 void NuPlayer::Renderer::onAudioSinkChanged() {
1757     if (offloadingAudio()) {
1758         return;
1759     }
1760     CHECK(!mDrainAudioQueuePending);
1761     mNumFramesWritten = 0;
1762     mAnchorNumFramesWritten = -1;
1763     uint32_t written;
1764     if (mAudioSink->getFramesWritten(&written) == OK) {
1765         mNumFramesWritten = written;
1766     }
1767 }
1768 
1769 void NuPlayer::Renderer::onDisableOffloadAudio() {
1770     Mutex::Autolock autoLock(mLock);
1771     mFlags &= ~FLAG_OFFLOAD_AUDIO;
1772     ++mAudioDrainGeneration;
1773     if (mAudioRenderingStartGeneration != -1) {
1774         prepareForMediaRenderingStart_l();
1775         // PauseTimeout is applied to offload mode only. Cancel pending timer.
1776         cancelAudioOffloadPauseTimeout();
1777     }
1778 }
1779 
1780 void NuPlayer::Renderer::onEnableOffloadAudio() {
1781     Mutex::Autolock autoLock(mLock);
1782     mFlags |= FLAG_OFFLOAD_AUDIO;
1783     ++mAudioDrainGeneration;
1784     if (mAudioRenderingStartGeneration != -1) {
1785         prepareForMediaRenderingStart_l();
1786     }
1787 }
1788 
1789 void NuPlayer::Renderer::onPause() {
1790     if (mPaused) {
1791         return;
1792     }
1793 
1794     {
1795         Mutex::Autolock autoLock(mLock);
1796         // We do not increment the audio drain generation, so that the audio buffer keeps filling during pause.
1797         ++mVideoDrainGeneration;
1798         prepareForMediaRenderingStart_l();
1799         mPaused = true;
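             // A playback rate of 0 freezes the media clock so the reported position stops
             // advancing while paused.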
1800         mMediaClock->setPlaybackRate(0.0);
1801     }
1802 
1803     mDrainAudioQueuePending = false;
1804     mDrainVideoQueuePending = false;
1805 
1806     // Note: audio data may not have been decoded, and the AudioSink may not be opened.
1807     mAudioSink->pause();
1808     startAudioOffloadPauseTimeout();
1809 
1810     ALOGV("now paused audio queue has %zu entries, video has %zu entries",
1811           mAudioQueue.size(), mVideoQueue.size());
1812 }
1813 
1814 void NuPlayer::Renderer::onResume() {
1815     if (!mPaused) {
1816         return;
1817     }
1818 
1819     // Note: audio data may not have been decoded, and the AudioSink may not be opened.
1820     cancelAudioOffloadPauseTimeout();
1821     if (mAudioSink->ready()) {
1822         status_t err = mAudioSink->start();
1823         if (err != OK) {
1824             ALOGE("cannot start AudioSink err %d", err);
1825             notifyAudioTearDown(kDueToError);
1826         }
1827     }
1828 
1829     {
1830         Mutex::Autolock autoLock(mLock);
1831         mPaused = false;
1832         // rendering started message may have been delayed if we were paused.
1833         if (mRenderingDataDelivered) {
1834             notifyIfMediaRenderingStarted_l();
1835         }
1836         // Configure the AudioSink's playback rate, as we did not do it when pausing.
1837         if (mAudioSink != NULL && mAudioSink->ready()) {
1838             mAudioSink->setPlaybackRate(mPlaybackSettings);
1839         }
1840 
1841         mMediaClock->setPlaybackRate(mPlaybackRate);
1842 
1843         if (!mAudioQueue.empty()) {
1844             postDrainAudioQueue_l();
1845         }
1846     }
1847 
1848     if (!mVideoQueue.empty()) {
1849         postDrainVideoQueue();
1850     }
1851 }
1852 
1853 void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
1854     if (mVideoScheduler == NULL) {
1855         mVideoScheduler = new VideoFrameScheduler();
1856     }
1857     mVideoScheduler->init(fps);
1858 }
1859 
1860 int32_t NuPlayer::Renderer::getQueueGeneration(bool audio) {
1861     Mutex::Autolock autoLock(mLock);
1862     return (audio ? mAudioQueueGeneration : mVideoQueueGeneration);
1863 }
1864 
1865 int32_t NuPlayer::Renderer::getDrainGeneration(bool audio) {
1866     Mutex::Autolock autoLock(mLock);
1867     return (audio ? mAudioDrainGeneration : mVideoDrainGeneration);
1868 }
1869 
1870 bool NuPlayer::Renderer::getSyncQueues() {
1871     Mutex::Autolock autoLock(mLock);
1872     return mSyncQueues;
1873 }
1874 
1875 void NuPlayer::Renderer::onAudioTearDown(AudioTearDownReason reason) {
1876     if (mAudioTornDown) {
1877         return;
1878     }
1879 
1880     // TimeoutWhenPaused is only for offload mode.
1881     if (reason == kDueToTimeout && !offloadingAudio()) {
1882         return;
1883     }
1884 
1885     mAudioTornDown = true;
1886 
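         // Report the current position so playback can resume from (approximately) the
         // same point once the audio path has been rebuilt.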
1887     int64_t currentPositionUs;
1888     sp<AMessage> notify = mNotify->dup();
1889     if (getCurrentPosition(&currentPositionUs) == OK) {
1890         notify->setInt64("positionUs", currentPositionUs);
1891     }
1892 
1893     mAudioSink->stop();
1894     mAudioSink->flush();
1895 
1896     notify->setInt32("what", kWhatAudioTearDown);
1897     notify->setInt32("reason", reason);
1898     notify->post();
1899 }
1900 
1901 void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
1902     if (offloadingAudio()) {
1903         mWakeLock->acquire();
1904         sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, this);
1905         msg->setInt32("drainGeneration", mAudioOffloadPauseTimeoutGeneration);
1906         msg->post(kOffloadPauseMaxUs);
1907     }
1908 }
1909 
1910 void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
1911     // We may have called startAudioOffloadPauseTimeout() without
1912     // the AudioSink open and with offloadingAudio enabled.
1913     //
1914     // When we cancel, it may be that offloadingAudio is subsequently disabled, so regardless
1915     // we always release the wakelock and increment the pause timeout generation.
1916     //
1917     // Note: The acquired wakelock prevents the device from suspending
1918     // immediately after offload pause (in case a resume happens shortly thereafter).
1919     mWakeLock->release(true);
1920     ++mAudioOffloadPauseTimeoutGeneration;
1921 }
1922 
1923 status_t NuPlayer::Renderer::onOpenAudioSink(
1924         const sp<AMessage> &format,
1925         bool offloadOnly,
1926         bool hasVideo,
1927         uint32_t flags,
1928         bool isStreaming) {
1929     ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)",
1930             offloadOnly, offloadingAudio());
1931     bool audioSinkChanged = false;
1932 
1933     int32_t numChannels;
1934     CHECK(format->findInt32("channel-count", &numChannels));
1935 
1936     int32_t rawChannelMask;
1937     audio_channel_mask_t channelMask =
1938             format->findInt32("channel-mask", &rawChannelMask) ?
1939                     static_cast<audio_channel_mask_t>(rawChannelMask)
1940                     // signal to the AudioSink to derive the mask from count.
1941                     : CHANNEL_MASK_USE_CHANNEL_ORDER;
1942 
1943     int32_t sampleRate;
1944     CHECK(format->findInt32("sample-rate", &sampleRate));
1945 
1946     // read pcm encoding from MediaCodec output format, if available
1947     int32_t pcmEncoding;
1948     audio_format_t audioFormat =
1949             format->findInt32(KEY_PCM_ENCODING, &pcmEncoding) ?
1950                     audioFormatFromEncoding(pcmEncoding) : AUDIO_FORMAT_PCM_16_BIT;
1951 
1952     if (offloadingAudio()) {
1953         AString mime;
1954         CHECK(format->findString("mime", &mime));
1955         status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());
1956 
1957         if (err != OK) {
1958             ALOGE("Couldn't map mime \"%s\" to a valid "
1959                     "audio_format", mime.c_str());
1960             onDisableOffloadAudio();
1961         } else {
1962             ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
1963                     mime.c_str(), audioFormat);
1964 
1965             int avgBitRate = 0;
1966             format->findInt32("bitrate", &avgBitRate);
1967 
1968             int32_t aacProfile = -1;
1969             if (audioFormat == AUDIO_FORMAT_AAC
1970                     && format->findInt32("aac-profile", &aacProfile)) {
1971                 // Redefine AAC format as per aac profile
1972                 mapAACProfileToAudioFormat(
1973                         audioFormat,
1974                         aacProfile);
1975             }
1976 
1977             audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
1978             offloadInfo.duration_us = -1;
1979             format->findInt64(
1980                     "durationUs", &offloadInfo.duration_us);
1981             offloadInfo.sample_rate = sampleRate;
1982             offloadInfo.channel_mask = channelMask;
1983             offloadInfo.format = audioFormat;
1984             offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
1985             offloadInfo.bit_rate = avgBitRate;
1986             offloadInfo.has_video = hasVideo;
1987             offloadInfo.is_streaming = isStreaming;
1988 
1989             if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
1990                 ALOGV("openAudioSink: no change in offload mode");
1991                 // no change from previous configuration, everything ok.
1992                 return OK;
1993             }
1994             mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
1995 
1996             ALOGV("openAudioSink: try to open AudioSink in offload mode");
1997             uint32_t offloadFlags = flags;
1998             offloadFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
1999             offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
2000             audioSinkChanged = true;
2001             mAudioSink->close();
2002 
2003             err = mAudioSink->open(
2004                     sampleRate,
2005                     numChannels,
2006                     (audio_channel_mask_t)channelMask,
2007                     audioFormat,
2008                     0 /* bufferCount - unused */,
2009                     &NuPlayer::Renderer::AudioSinkCallback,
2010                     this,
2011                     (audio_output_flags_t)offloadFlags,
2012                     &offloadInfo);
2013 
2014             if (err == OK) {
2015                 err = mAudioSink->setPlaybackRate(mPlaybackSettings);
2016             }
2017 
2018             if (err == OK) {
2019                 // If the playback is offloaded to h/w, we pass
2020                 // the HAL some metadata information.
2021                 // We don't want to do this for PCM because it
2022                 // will be going through the AudioFlinger mixer
2023                 // before reaching the hardware.
2024                 // TODO
2025                 mCurrentOffloadInfo = offloadInfo;
2026                 if (!mPaused) { // for preview mode, don't start if paused
2027                     err = mAudioSink->start();
2028                 }
2029                 ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
2030             }
2031             if (err != OK) {
2032                 // Clean up, fall back to non offload mode.
2033                 mAudioSink->close();
2034                 onDisableOffloadAudio();
2035                 mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
2036                 ALOGV("openAudioSink: offload failed");
2037                 if (offloadOnly) {
2038                     notifyAudioTearDown(kForceNonOffload);
2039                 }
2040             } else {
2041                 mUseAudioCallback = true;  // offload mode transfers data through callback
2042                 ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.
2043             }
2044         }
2045     }
2046     if (!offloadOnly && !offloadingAudio()) {
2047         ALOGV("openAudioSink: open AudioSink in NON-offload mode");
2048         uint32_t pcmFlags = flags;
2049         pcmFlags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
2050 
2051         const PcmInfo info = {
2052                 (audio_channel_mask_t)channelMask,
2053                 (audio_output_flags_t)pcmFlags,
2054                 audioFormat,
2055                 numChannels,
2056                 sampleRate
2057         };
2058         if (memcmp(&mCurrentPcmInfo, &info, sizeof(info)) == 0) {
2059             ALOGV("openAudioSink: no change in pcm mode");
2060             // no change from previous configuration, everything ok.
2061             return OK;
2062         }
2063 
2064         audioSinkChanged = true;
2065         mAudioSink->close();
2066         mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
2067         // Note: It is possible to set up the callback, but not use it to send audio data.
2068         // This requires a fix in AudioSink to explicitly specify the transfer mode.
2069         mUseAudioCallback = getUseAudioCallbackSetting();
2070         if (mUseAudioCallback) {
2071             ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.
2072         }
2073 
2074         // Compute the desired buffer size.
2075         // For callback mode, the amount of time before wakeup is about half the buffer size.
2076         const uint32_t frameCount =
2077                 (unsigned long long)sampleRate * getAudioSinkPcmMsSetting() / 1000;
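             // e.g. a 48 kHz stream with the default 500 ms sink setting yields 24000 frames.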
2078 
2079         // Setting doNotReconnect means the AudioSink will signal back and let NuPlayer
2080         // re-construct the AudioSink. We don't want this when there's video because it will
2081         // cause a video seek to the previous I-frame. But we do want it when there's only
2082         // audio, because it gives NuPlayer a chance to switch from non-offload to offload mode.
2083         // So we only set doNotReconnect when there's no video.
2084         const bool doNotReconnect = !hasVideo;
2085 
2086         // We should always be able to set our playback settings if the sink is closed.
2087         LOG_ALWAYS_FATAL_IF(mAudioSink->setPlaybackRate(mPlaybackSettings) != OK,
2088                 "onOpenAudioSink: can't set playback rate on closed sink");
2089         status_t err = mAudioSink->open(
2090                     sampleRate,
2091                     numChannels,
2092                     (audio_channel_mask_t)channelMask,
2093                     audioFormat,
2094                     0 /* bufferCount - unused */,
2095                     mUseAudioCallback ? &NuPlayer::Renderer::AudioSinkCallback : NULL,
2096                     mUseAudioCallback ? this : NULL,
2097                     (audio_output_flags_t)pcmFlags,
2098                     NULL,
2099                     doNotReconnect,
2100                     frameCount);
2101         if (err != OK) {
2102             ALOGW("openAudioSink: non offloaded open failed status: %d", err);
2103             mAudioSink->close();
2104             mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
2105             return err;
2106         }
2107         mCurrentPcmInfo = info;
2108         if (!mPaused) { // for preview mode, don't start if paused
2109             mAudioSink->start();
2110         }
2111     }
2112     if (audioSinkChanged) {
2113         onAudioSinkChanged();
2114     }
2115     mAudioTornDown = false;
2116     return OK;
2117 }
2118 
2119 void NuPlayer::Renderer::onCloseAudioSink() {
2120     mAudioSink->close();
2121     mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
2122     mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
2123 }
2124 
2125 void NuPlayer::Renderer::onChangeAudioFormat(
2126         const sp<AMessage> &meta, const sp<AMessage> &notify) {
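         // Reopen the AudioSink with the new output format; the result (including any
         // error) is reported back through the supplied notify message.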
2127     sp<AMessage> format;
2128     CHECK(meta->findMessage("format", &format));
2129 
2130     int32_t offloadOnly;
2131     CHECK(meta->findInt32("offload-only", &offloadOnly));
2132 
2133     int32_t hasVideo;
2134     CHECK(meta->findInt32("has-video", &hasVideo));
2135 
2136     uint32_t flags;
2137     CHECK(meta->findInt32("flags", (int32_t *)&flags));
2138 
2139     uint32_t isStreaming;
2140     CHECK(meta->findInt32("isStreaming", (int32_t *)&isStreaming));
2141 
2142     status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags, isStreaming);
2143 
2144     if (err != OK) {
2145         notify->setInt32("err", err);
2146     }
2147     notify->post();
2148 }
2149 
2150 }  // namespace android
2151